Merge pull request #258 from ylei-tsubame/web-ui-bundle-20221125

Web UI bundle 20221125
main
Committed by Digimer via GitHub
commit f9c5a30ea4
Changed files (lines changed in parentheses):
  1. Anvil/Tools.pm (7)
  2. Anvil/Tools/Cluster.pm (11)
  3. striker-ui-api/.eslintignore (5)
  4. striker-ui-api/.eslintrc.json (14)
  5. striker-ui-api/.prettierignore (5)
  6. striker-ui-api/app.js (18)
  7. striker-ui-api/lib/accessDB.js (94)
  8. striker-ui-api/lib/consts/API_ROOT_PATH.js (3)
  9. striker-ui-api/lib/consts/SERVER_PATHS.js (34)
  10. striker-ui-api/lib/request_handlers/files/buildGetFiles.js (29)
  11. striker-ui-api/lib/request_handlers/files/getFileDetail.js (25)
  12. striker-ui-api/lib/request_handlers/files/getFilesOverview.js (13)
  13. striker-ui-api/out/index.js (2)
  14. striker-ui-api/package-lock.json (4108)
  15. striker-ui-api/package.json (21)
  16. striker-ui-api/src/.eslintrc.json (8)
  17. striker-ui-api/src/app.ts (18)
  18. striker-ui-api/src/index.ts (4)
  19. striker-ui-api/src/lib/accessModule.ts (207)
  20. striker-ui-api/src/lib/buildCondition.ts (38)
  21. striker-ui-api/src/lib/buildQueryResultModifier.ts (14)
  22. striker-ui-api/src/lib/call.ts (7)
  23. striker-ui-api/src/lib/cap.ts (4)
  24. striker-ui-api/src/lib/consts/API_ROOT_PATH.ts (3)
  25. striker-ui-api/src/lib/consts/HOST_KEY_CHANGED_PREFIX.ts (1)
  26. striker-ui-api/src/lib/consts/LOCAL.ts (1)
  27. striker-ui-api/src/lib/consts/NODE_AND_DR_RESERVED_MEMORY_SIZE.ts (4)
  28. striker-ui-api/src/lib/consts/OS_LIST.ts (26)
  29. striker-ui-api/src/lib/consts/REG_EXP_PATTERNS.ts (23)
  30. striker-ui-api/src/lib/consts/SERVER_PATHS.ts (54)
  31. striker-ui-api/src/lib/consts/SERVER_PORT.ts (2)
  32. striker-ui-api/src/lib/convertHostUUID.ts (13)
  33. striker-ui-api/src/lib/getShortHostName.ts (2)
  34. striker-ui-api/src/lib/join.ts (27)
  35. striker-ui-api/src/lib/match.ts (5)
  36. striker-ui-api/src/lib/request_handlers/anvil/buildQueryAnvilDetail.ts (323)
  37. striker-ui-api/src/lib/request_handlers/anvil/getAnvil.ts (81)
  38. striker-ui-api/src/lib/request_handlers/buildBranchRequestHandler.ts (34)
  39. striker-ui-api/src/lib/request_handlers/buildGetRequestHandler.ts (64)
  40. striker-ui-api/src/lib/request_handlers/command/buildHostPowerHandler.ts (53)
  41. striker-ui-api/src/lib/request_handlers/command/getHostSSH.ts (88)
  42. striker-ui-api/src/lib/request_handlers/command/index.ts (4)
  43. striker-ui-api/src/lib/request_handlers/command/poweroffHost.ts (3)
  44. striker-ui-api/src/lib/request_handlers/command/rebootHost.ts (3)
  45. striker-ui-api/src/lib/request_handlers/command/updateSystem.ts (26)
  46. striker-ui-api/src/lib/request_handlers/file/buildQueryFileDetail.ts (40)
  47. striker-ui-api/src/lib/request_handlers/file/getFile.ts (31)
  48. striker-ui-api/src/lib/request_handlers/file/getFileDetail.ts (12)
  49. striker-ui-api/src/lib/request_handlers/host/buildQueryHostDetail.ts (68)
  50. striker-ui-api/src/lib/request_handlers/host/configStriker.ts (164)
  51. striker-ui-api/src/lib/request_handlers/host/createHost.ts (8)
  52. striker-ui-api/src/lib/request_handlers/host/createHostConnection.ts (190)
  53. striker-ui-api/src/lib/request_handlers/host/deleteHostConnection.ts (42)
  54. striker-ui-api/src/lib/request_handlers/host/getHost.ts (48)
  55. striker-ui-api/src/lib/request_handlers/host/getHostConnection.ts (128)
  56. striker-ui-api/src/lib/request_handlers/host/getHostDetail.ts (19)
  57. striker-ui-api/src/lib/request_handlers/host/index.ts (8)
  58. striker-ui-api/src/lib/request_handlers/host/prepareHost.ts (150)
  59. striker-ui-api/src/lib/request_handlers/host/setHostInstallTarget.ts (39)
  60. striker-ui-api/src/lib/request_handlers/host/updateHost.ts (8)
  61. striker-ui-api/src/lib/request_handlers/job/getJob.ts (82)
  62. striker-ui-api/src/lib/request_handlers/job/index.ts (1)
  63. striker-ui-api/src/lib/request_handlers/network-interface/getNetworkInterface.ts (54)
  64. striker-ui-api/src/lib/request_handlers/network-interface/index.ts (1)
  65. striker-ui-api/src/lib/request_handlers/server/createServer.ts (158)
  66. striker-ui-api/src/lib/request_handlers/server/getServer.ts (60)
  67. striker-ui-api/src/lib/request_handlers/server/getServerDetail.ts (152)
  68. striker-ui-api/src/lib/request_handlers/server/index.ts (3)
  69. striker-ui-api/src/lib/request_handlers/ssh-key/deleteSSHKeyConflict.ts (41)
  70. striker-ui-api/src/lib/request_handlers/ssh-key/getSSHKeyConflict.ts (66)
  71. striker-ui-api/src/lib/request_handlers/ssh-key/index.ts (2)
  72. striker-ui-api/src/lib/request_handlers/user/getUser.ts (27)
  73. striker-ui-api/src/lib/request_handlers/user/index.ts (1)
  74. striker-ui-api/src/lib/sanitize.ts (71)
  75. striker-ui-api/src/lib/sanitizeSQLParam.ts (2)
  76. striker-ui-api/src/lib/shell.ts (46)
  77. striker-ui-api/src/middlewares/uploadSharedFiles.ts (6)
  78. striker-ui-api/src/routes/anvil.ts (9)
  79. striker-ui-api/src/routes/command.ts (18)
  80. striker-ui-api/src/routes/echo.ts (4)
  81. striker-ui-api/src/routes/file.ts (88)
  82. striker-ui-api/src/routes/host.ts (28)
  83. striker-ui-api/src/routes/index.ts (27)
  84. striker-ui-api/src/routes/job.ts (9)
  85. striker-ui-api/src/routes/network-interface.ts (9)
  86. striker-ui-api/src/routes/server.ts (16)
  87. striker-ui-api/src/routes/ssh-key.ts (14)
  88. striker-ui-api/src/routes/user.ts (9)
  89. striker-ui-api/src/types/AnvilDataStruct.d.ts (3)
  90. striker-ui-api/src/types/AnvilDetail.d.ts (33)
  91. striker-ui-api/src/types/AnvilOverview.d.ts (8)
  92. striker-ui-api/src/types/BuildGetRequestHandlerOptions.d.ts (3)
  93. striker-ui-api/src/types/BuildQueryDetailFunction.d.ts (9)
  94. striker-ui-api/src/types/BuildQueryFunction.d.ts (10)
  95. striker-ui-api/src/types/CallOptions.d.ts (4)
  96. striker-ui-api/src/types/CreateHostConnectionRequestBody.d.ts (11)
  97. striker-ui-api/src/types/DBInsertOrUpdateFunctionCommon.d.ts (9)
  98. striker-ui-api/src/types/DBInsertOrUpdateJobFunction.d.ts (11)
  99. striker-ui-api/src/types/DBInsertOrUpdateVariableFunction.d.ts (18)
  100. striker-ui-api/src/types/DBJobAnvilSyncSharedOptions.d.ts (3)
Some files were not shown because too many files have changed in this diff.

@@ -1083,6 +1083,9 @@ sub _set_paths
 'redhat-release' => "/etc/redhat-release",
 fences_unified_metadata => "/var/www/html/fences_unified_metadata.xml",
 },
+devices => {
+	stdout => "/dev/stdout",
+},
 directories => {
 alert_emails => "/var/spool/anvil",
 anvil => "/etc/anvil",
@@ -1114,6 +1117,7 @@ sub _set_paths
 syslinux => "/usr/share/syslinux",
 tftpboot => "/var/lib/tftpboot",
 temp => "/tmp/anvil",
+tmp => "/tmp",
 tools => "/usr/sbin",
 units => "/usr/lib/systemd/system",
 },
@@ -1148,6 +1152,7 @@ sub _set_paths
 'anvil-update-states' => "/usr/sbin/anvil-update-states",
 'anvil-update-system' => "/usr/sbin/anvil-update-system",
 augtool => "/usr/bin/augtool",
+base64 => "/usr/bin/base64",
 blockdev => "/usr/sbin/blockdev",
 bridge => "/usr/sbin/bridge",
 bzip2 => "/usr/bin/bzip2",
@@ -1224,6 +1229,8 @@ sub _set_paths
 ocf_alteeve => "/usr/lib/ocf/resource.d/alteeve/server",
 openssl => "/usr/bin/openssl",
 'osinfo-query' => "/usr/bin/osinfo-query",
+pamscale => "/usr/bin/pamscale",
+pamtopng => "/usr/bin/pamtopng",
 passwd => "/usr/bin/passwd",
 pcs => "/usr/sbin/pcs",
 perccli64 => "/opt/MegaRAID/perccli/perccli64",

@@ -2150,10 +2150,11 @@ This is the Anvil! UUID we're looking for the primary node in.
 =cut
 sub get_primary_host_uuid
 {
 my $self = shift;
 my $parameter = shift;
 my $anvil = $self->parent;
-my $debug = defined $parameter->{debug} ? $parameter->{debug} : 3;
+my $test_access_user = defined $parameter->{test_access_user} ? $parameter->{test_access_user} : undef;
+my $debug = defined $parameter->{debug} ? $parameter->{debug} : 3;
 $anvil->Log->entry({source => $THIS_FILE, line => __LINE__, level => $debug, key => "log_0125", variables => { method => "Cluster->get_primary_host_uuid()" }});
 my $anvil_uuid = defined $parameter->{anvil_uuid} ? $parameter->{anvil_uuid} : "";
@@ -2201,11 +2202,13 @@ sub get_primary_host_uuid
 debug => $debug,
 target => $node1_target_ip,
 password => $password,
+user => $test_access_user,
 });
 my $node2_access = $anvil->Remote->test_access({
 debug => $debug,
 target => $node2_target_ip,
 password => $password,
+user => $test_access_user,
 });
 $anvil->Log->variables({source => $THIS_FILE, line => __LINE__, level => $debug, list => {
 node1_access => $node1_access,

@@ -0,0 +1,5 @@
# dependencies
node_modules
# output
out

@@ -0,0 +1,14 @@
{
"env": {
"es2022": true,
"node": true
},
"extends": [
"eslint:recommended",
"plugin:import/errors",
"plugin:import/typescript",
"plugin:import/warnings",
"prettier"
],
"plugins": ["import"]
}

@@ -0,0 +1,5 @@
# dependencies
node_modules
# output
out

@@ -1,18 +0,0 @@
const cors = require('cors');
const express = require('express');
const path = require('path');
const API_ROOT_PATH = require('./lib/consts/API_ROOT_PATH');
const echoRouter = require('./routes/echo');
const filesRouter = require('./routes/files');
const app = express();
app.use(express.json());
app.use(cors());
app.use(path.join(API_ROOT_PATH, 'echo'), echoRouter);
app.use(path.join(API_ROOT_PATH, 'files'), filesRouter);
module.exports = app;

@@ -1,94 +0,0 @@
const { spawnSync } = require('child_process');
const SERVER_PATHS = require('./consts/SERVER_PATHS');
const execStrikerAccessDatabase = (
args,
options = {
timeout: 10000,
encoding: 'utf-8',
},
) => {
const { error, stdout, stderr } = spawnSync(
SERVER_PATHS.usr.sbin['striker-access-database'].self,
args,
options,
);
if (error) {
throw error;
}
if (stderr) {
throw new Error(stderr);
}
let output;
try {
output = JSON.parse(stdout);
} catch (stdoutParseError) {
output = stdout;
console.warn(
`Failed to parse striker-access-database output [${output}]; error: [${stdoutParseError}]`,
);
}
return {
stdout: output,
};
};
const execDatabaseModuleSubroutine = (subName, subParams, options) => {
const args = ['--sub', subName];
if (subParams) {
args.push('--sub-params', JSON.stringify(subParams));
}
const { stdout } = execStrikerAccessDatabase(args, options);
return {
stdout: stdout['sub_results'],
};
};
const accessDB = {
dbJobAnvilSyncShared: (
jobName,
jobData,
jobTitle,
jobDescription,
{ jobHostUUID } = { jobHostUUID: undefined },
) => {
const subParams = {
file: __filename,
line: 0,
job_command: SERVER_PATHS.usr.sbin['anvil-sync-shared'].self,
job_data: jobData,
job_name: `storage::${jobName}`,
job_title: `job_${jobTitle}`,
job_description: `job_${jobDescription}`,
job_progress: 0,
};
if (jobHostUUID) {
subParams.job_host_uuid = jobHostUUID;
}
console.log(JSON.stringify(subParams, null, 2));
return execDatabaseModuleSubroutine('insert_or_update_jobs', subParams)
.stdout;
},
dbQuery: (query, options) =>
execStrikerAccessDatabase(['--query', query], options),
dbSub: execDatabaseModuleSubroutine,
dbSubRefreshTimestamp: () =>
execDatabaseModuleSubroutine('refresh_timestamp').stdout,
dbWrite: (query, options) =>
execStrikerAccessDatabase(['--query', query, '--mode', 'write'], options),
};
module.exports = accessDB;

@@ -1,3 +0,0 @@
const API_ROOT_PATH = '/api';
module.exports = API_ROOT_PATH;

@@ -1,34 +0,0 @@
const path = require('path');
const SERVER_PATHS = {
mnt: {
shared: {
incoming: {},
},
},
usr: {
sbin: {
'anvil-sync-shared': {},
'striker-access-database': {},
},
},
};
const generatePaths = (
currentObject,
parents = path.parse(process.cwd()).root,
) => {
Object.keys(currentObject).forEach((pathKey) => {
const currentPath = path.join(parents, pathKey);
currentObject[pathKey].self = currentPath;
if (pathKey !== 'self') {
generatePaths(currentObject[pathKey], currentPath);
}
});
};
generatePaths(SERVER_PATHS);
module.exports = SERVER_PATHS;

@@ -1,29 +0,0 @@
const { dbQuery } = require('../../accessDB');
const buildGetFiles = (query) => (request, response) => {
console.log('Calling CLI script to get data.');
let queryStdout;
try {
({ stdout: queryStdout } = dbQuery(
typeof query === 'function' ? query(request) : query,
));
} catch (queryError) {
console.log(`Query error: ${queryError}`);
response.status(500).send();
}
console.log(
`Query stdout (type=[${typeof queryStdout}]): ${JSON.stringify(
queryStdout,
null,
2,
)}`,
);
response.json(queryStdout);
};
module.exports = buildGetFiles;

@@ -1,25 +0,0 @@
const buildGetFiles = require('./buildGetFiles');
const getFileDetail = buildGetFiles(
(request) =>
`SELECT
fil.file_uuid,
fil.file_name,
fil.file_size,
fil.file_type,
fil.file_md5sum,
fil_loc.file_location_uuid,
fil_loc.file_location_active,
anv.anvil_uuid,
anv.anvil_name,
anv.anvil_description
FROM files AS fil
JOIN file_locations AS fil_loc
ON fil.file_uuid = fil_loc.file_location_file_uuid
JOIN anvils AS anv
ON fil_loc.file_location_anvil_uuid = anv.anvil_uuid
WHERE fil.file_uuid = '${request.params.fileUUID}'
AND fil.file_type != 'DELETED';`,
);
module.exports = getFileDetail;

@@ -1,13 +0,0 @@
const buildGetFiles = require('./buildGetFiles');
const getFilesOverview = buildGetFiles(`
SELECT
file_uuid,
file_name,
file_size,
file_type,
file_md5sum
FROM files
WHERE file_type != 'DELETED';`);
module.exports = getFilesOverview;

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

@@ -4,11 +4,11 @@
   "description": "API for striker-ui",
   "scripts": {
     "build": "webpack",
-    "build:clean": "rm -rf out",
-    "dev": "node index.js",
-    "rebuild": "npm run build:clean && npm run build",
-    "start": "npm run rebuild && node out/index.js",
-    "style:fix": "prettier --write *"
+    "eslint:base": "eslint --ext js,ts --max-warnings=0",
+    "lint": "npm run eslint:base -- . && npm run prettier:base -- --check",
+    "lint:fix": "npm run eslint:base -- --fix . && npm run prettier:base -- --write",
+    "prettier:base": "prettier '**/*.{js,json,md,ts}'",
+    "start": "npm run build && node out/index.js"
   },
   "dependencies": {
     "cors": "^2.8.5",
@@ -18,8 +18,19 @@
   "devDependencies": {
     "@babel/core": "^7.17.8",
     "@babel/preset-env": "^7.16.11",
+    "@babel/preset-typescript": "^7.16.7",
+    "@types/cors": "^2.8.12",
+    "@types/express": "^4.17.13",
+    "@types/multer": "^1.4.7",
+    "@types/node": "^17.0.22",
+    "@typescript-eslint/eslint-plugin": "^5.16.0",
+    "@typescript-eslint/parser": "^5.16.0",
     "babel-loader": "^8.2.3",
+    "eslint": "^8.10.0",
+    "eslint-config-prettier": "^8.5.0",
+    "eslint-plugin-import": "^2.25.4",
     "prettier": "^2.5.0",
+    "typescript": "^4.6.2",
     "webpack": "^5.70.0",
     "webpack-cli": "^4.9.2"
   }

@@ -0,0 +1,8 @@
{
"extends": ["../.eslintrc.json", "plugin:@typescript-eslint/recommended"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"project": "./tsconfig.json"
},
"plugins": ["@typescript-eslint"]
}

@@ -0,0 +1,18 @@
import cors from 'cors';
import express from 'express';
import path from 'path';
import API_ROOT_PATH from './lib/consts/API_ROOT_PATH';
import routes from './routes';
const app = express();
app.use(express.json());
app.use(cors());
Object.entries(routes).forEach(([route, router]) => {
app.use(path.join(API_ROOT_PATH, route), router);
});
export default app;

@@ -1,6 +1,6 @@
-const app = require('./app');
-const SERVER_PORT = require('./lib/consts/SERVER_PORT');
+import app from './app';
+import SERVER_PORT from './lib/consts/SERVER_PORT';
 app.listen(SERVER_PORT, () => {
   console.log(`Listening on localhost:${SERVER_PORT}.`);

@@ -0,0 +1,207 @@
import { spawnSync, SpawnSyncOptions } from 'child_process';
import SERVER_PATHS from './consts/SERVER_PATHS';
import { stderr as sherr, stdout as shout } from './shell';
const formatQuery = (query: string) => query.replace(/\s+/g, ' ');
const execAnvilAccessModule = (
args: string[],
options: SpawnSyncOptions = {
encoding: 'utf-8',
timeout: 10000,
},
) => {
const { error, stdout, stderr } = spawnSync(
SERVER_PATHS.usr.sbin['anvil-access-module'].self,
args,
options,
);
if (error) {
throw error;
}
if (stderr.length > 0) {
throw new Error(stderr.toString());
}
let output;
try {
output = JSON.parse(stdout.toString());
} catch (stdoutParseError) {
output = stdout;
sherr(
`Failed to parse anvil-access-module output [${output}]; CAUSE: [${stdoutParseError}]`,
);
}
return {
stdout: output,
};
};
const execModuleSubroutine = (
subName: string,
{
spawnSyncOptions,
subModuleName,
subParams,
}: ExecModuleSubroutineOptions = {},
) => {
const args = ['--sub', subName];
// Defaults to "Database" in anvil-access-module.
if (subModuleName) {
args.push('--sub-module', subModuleName);
}
if (subParams) {
args.push('--sub-params', JSON.stringify(subParams));
}
shout(
`...${subModuleName}->${subName} with params: ${JSON.stringify(
subParams,
null,
2,
)}`,
);
const { stdout } = execAnvilAccessModule(args, spawnSyncOptions);
return {
stdout: stdout['sub_results'],
};
};
const dbInsertOrUpdateJob = (
{ job_progress = 0, line = 0, ...rest }: DBJobParams,
{ spawnSyncOptions }: DBInsertOrUpdateJobOptions = {},
) =>
execModuleSubroutine('insert_or_update_jobs', {
spawnSyncOptions,
subParams: { job_progress, line, ...rest },
}).stdout;
const dbInsertOrUpdateVariable: DBInsertOrUpdateVariableFunction = (
subParams,
{ spawnSyncOptions } = {},
) =>
execModuleSubroutine('insert_or_update_variables', {
spawnSyncOptions,
subParams,
}).stdout;
const dbJobAnvilSyncShared = (
jobName: string,
jobData: string,
jobTitle: string,
jobDescription: string,
{ jobHostUUID }: DBJobAnvilSyncSharedOptions = { jobHostUUID: undefined },
) => {
const subParams: DBJobParams = {
file: __filename,
job_command: SERVER_PATHS.usr.sbin['anvil-sync-shared'].self,
job_data: jobData,
job_name: `storage::${jobName}`,
job_title: `job_${jobTitle}`,
job_description: `job_${jobDescription}`,
};
if (jobHostUUID) {
subParams.job_host_uuid = jobHostUUID;
}
return dbInsertOrUpdateJob(subParams);
};
const dbQuery = (query: string, options?: SpawnSyncOptions) => {
shout(formatQuery(query));
return execAnvilAccessModule(['--query', query], options);
};
const dbSubRefreshTimestamp = () =>
execModuleSubroutine('refresh_timestamp').stdout;
const dbWrite = (query: string, options?: SpawnSyncOptions) => {
shout(formatQuery(query));
return execAnvilAccessModule(['--query', query, '--mode', 'write'], options);
};
const getAnvilData = (
dataStruct: AnvilDataStruct,
{ predata, ...spawnSyncOptions }: GetAnvilDataOptions = {},
) =>
execAnvilAccessModule(
[
'--predata',
JSON.stringify(predata),
'--data',
JSON.stringify(dataStruct),
],
spawnSyncOptions,
).stdout;
const getLocalHostUUID = () => {
let result: string;
try {
result = execModuleSubroutine('host_uuid', {
subModuleName: 'Get',
}).stdout;
} catch (subError) {
throw new Error(`Failed to get localhost UUID; CAUSE: ${subError}`);
}
shout(`localHostUUID=[${result}]`);
return result;
};
const getPeerData: GetPeerDataFunction = (
target,
{ password, port, ...restOptions } = {},
) => {
const [
rawIsConnected,
{
host_name: hostName,
host_os: hostOS,
host_uuid: hostUUID,
internet: rawIsInetConnected,
os_registered: rawIsOSRegistered,
},
] = execModuleSubroutine('get_peer_data', {
subModuleName: 'Striker',
subParams: { password, port, target },
...restOptions,
}).stdout as [connected: string, data: PeerDataHash];
return {
hostName,
hostOS,
hostUUID,
isConnected: rawIsConnected === '1',
isInetConnected: rawIsInetConnected === '1',
isOSRegistered: rawIsOSRegistered === 'yes',
};
};
export {
dbInsertOrUpdateJob as job,
dbInsertOrUpdateVariable as variable,
dbJobAnvilSyncShared,
dbQuery,
dbSubRefreshTimestamp,
dbWrite,
getAnvilData,
getLocalHostUUID,
getPeerData,
execModuleSubroutine as sub,
};
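For orientation, a minimal usage sketch of the exports above (the relative import path and the SQL text are illustrative assumptions, not part of this diff):

import { dbQuery, sub } from './lib/accessModule';

// Run a read-only query through anvil-access-module (illustrative SQL).
const { stdout: hostRows } = dbQuery('SELECT host_uuid, host_name FROM hosts;');

// Call a subroutine on a module other than the default Database module,
// mirroring what getLocalHostUUID() does internally.
const { stdout: localHostUUID } = sub('host_uuid', { subModuleName: 'Get' });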

@@ -0,0 +1,38 @@
import call from './call';
import join from './join';
import { sanitize } from './sanitize';
const buildIDCondition = (
keys: Parameters<JoinFunction>[0],
conditionPrefix: string,
{
onFallback,
beforeReturn = (result) =>
result
? `${conditionPrefix} IN (${result})`
: call(onFallback, { notCallableReturn: '' }),
}: Pick<JoinOptions, 'beforeReturn'> & { onFallback?: () => string } = {},
) =>
join(keys, {
beforeReturn,
elementWrapper: "'",
separator: ', ',
}) as string;
export const buildUnknownIDCondition = (
keys: unknown,
conditionPrefix: string,
{ onFallback }: { onFallback?: () => string },
): { after: string; before: string[] } => {
const before = sanitize(keys, 'string[]', {
modifierType: 'sql',
});
const after = buildIDCondition(before, conditionPrefix, { onFallback });
return { after, before };
};
export const buildKnownIDCondition = (
keys: string[] | '*' = '*',
conditionPrefix: string,
) => (keys[0] === '*' ? '' : buildIDCondition(keys, conditionPrefix));
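A small sketch of the known-ID helper with placeholder UUID strings (the values and the import path are hypothetical):

import { buildKnownIDCondition } from './lib/buildCondition';

// The wildcard collapses the condition to an empty string.
buildKnownIDCondition('*', 'AND hos.host_uuid'); // ''

// Otherwise the keys are quoted and wrapped into an IN (...) clause.
buildKnownIDCondition(['uuid1', 'uuid2'], 'AND hos.host_uuid');
// "AND hos.host_uuid IN ('uuid1', 'uuid2')"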

@@ -0,0 +1,14 @@
type QueryField = string;
export const buildQueryResultModifier =
<T>(mod: (output: QueryField[][]) => T): QueryResultModifierFunction =>
(output) =>
output instanceof Array ? mod(output) : output;
export const buildQueryResultReducer = <T>(
reduce: (previous: T, row: QueryField[]) => T,
initialValue: T,
) =>
buildQueryResultModifier<T>((output) =>
output.reduce<T>(reduce, initialValue),
);
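As an illustration (row values and import path are hypothetical), buildQueryResultReducer folds raw query rows into an accumulator, while non-array output passes through unchanged:

import { buildQueryResultReducer } from './lib/buildQueryResultModifier';

// Fold [hostName, hostUUID] rows into a { [hostUUID]: hostName } map.
const toHostMap = buildQueryResultReducer<Record<string, string>>(
  (previous, [hostName, hostUUID]) => {
    previous[hostUUID] = hostName;
    return previous;
  },
  {},
);

toHostMap([['an-striker01', 'uuid1']]); // { uuid1: 'an-striker01' }
// Non-array output (for example, an error string) is returned unchanged.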

@@ -0,0 +1,7 @@
const call = <T = unknown>(
toCall: unknown,
{ parameters = [], notCallableReturn }: CallOptions = {},
): T =>
typeof toCall === 'function' ? toCall(...parameters) : notCallableReturn;
export default call;

@@ -0,0 +1,4 @@
export const cap = (
value: string,
{ locales }: { locales?: string | string[] } = {},
) => `${value[0].toLocaleUpperCase(locales)}${value.slice(1)}`;

@@ -0,0 +1,3 @@
const API_ROOT_PATH = '/api';
export default API_ROOT_PATH;

@@ -0,0 +1 @@
export const HOST_KEY_CHANGED_PREFIX = 'host_key_changed::';

@@ -0,0 +1 @@
export const LOCAL = 'local';

@@ -0,0 +1,4 @@
// Unit: bytes
const NODE_AND_DR_RESERVED_MEMORY_SIZE = 8589934592;
export default NODE_AND_DR_RESERVED_MEMORY_SIZE;

@@ -0,0 +1,26 @@
import { execSync } from 'child_process';
import SERVER_PATHS from './SERVER_PATHS';
type OSKeyMapToName = Record<string, string>;
const osList: string[] = execSync(
`${SERVER_PATHS.usr.sbin['striker-parse-os-list'].self} | ${SERVER_PATHS.usr.bin['sed'].self} -E 's/^.*name="os_list_([^"]+).*CDATA[[]([^]]+).*$/\\1,\\2/'`,
{
encoding: 'utf-8',
timeout: 10000,
},
).split('\n');
osList.pop();
const osKeyMapToName: OSKeyMapToName = osList.reduce((map, csv) => {
const [osKey, osName] = csv.split(',', 2);
map[osKey] = osName;
return map;
}, {} as OSKeyMapToName);
export const OS_LIST: Readonly<string[]> = osList;
export const OS_LIST_MAP: Readonly<OSKeyMapToName> = osKeyMapToName;

@@ -0,0 +1,23 @@
const hex = '[0-9a-f]';
const octet = '(?:25[0-5]|(?:2[0-4]|1[0-9]|[1-9]|)[0-9])';
const alphanumeric = '[a-z0-9]';
const alphanumericDash = '[a-z0-9-]';
const ipv4 = `(?:${octet}[.]){3}${octet}`;
export const REP_DOMAIN = new RegExp(
`^(?:${alphanumeric}(?:${alphanumericDash}{0,61}${alphanumeric})?[.])+${alphanumeric}${alphanumericDash}{0,61}${alphanumeric}$`,
);
export const REP_INTEGER = /^\d+$/;
export const REP_IPV4 = new RegExp(`^${ipv4}$`);
export const REP_IPV4_CSV = new RegExp(`(?:${ipv4},)*${ipv4}`);
// Peaceful string is temporarily defined as a string without single-quote, double-quote, slash (/), backslash (\\), angle brackets (< >), and curly brackets ({ }).
export const REP_PEACEFUL_STRING = /^[^'"/\\><}{]+$/;
export const REP_UUID = new RegExp(
`^${hex}{8}-${hex}{4}-[1-5]${hex}{3}-[89ab]${hex}{3}-${hex}{12}$`,
'i',
);
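A brief sketch of how these patterns behave on hypothetical inputs (the import path is assumed):

import {
  REP_IPV4,
  REP_PEACEFUL_STRING,
  REP_UUID,
} from './lib/consts/REG_EXP_PATTERNS';

REP_IPV4.test('10.201.4.1');                           // true
REP_UUID.test('4b9969dc-2f1c-4a05-9b10-34d1d2f9e9a5'); // true
REP_PEACEFUL_STRING.test('contains "quotes"');         // false; double-quotes are not peaceful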

@@ -0,0 +1,54 @@
import path from 'path';
const EMPTY_SERVER_PATHS: ServerPath = {
mnt: {
shared: {
incoming: {},
},
},
tmp: {},
usr: {
bin: {
date: {},
mkfifo: {},
psql: {},
rm: {},
sed: {},
},
sbin: {
'anvil-access-module': {},
'anvil-configure-host': {},
'anvil-get-server-screenshot': {},
'anvil-manage-keys': {},
'anvil-manage-power': {},
'anvil-provision-server': {},
'anvil-sync-shared': {},
'anvil-update-system': {},
'striker-initialize-host': {},
'striker-manage-install-target': {},
'striker-manage-peers': {},
'striker-parse-os-list': {},
},
},
};
const generatePaths = (
currentObject: ServerPath,
parents = path.parse(process.cwd()).root,
) => {
Object.keys(currentObject).forEach((pathKey) => {
if (pathKey !== 'self') {
const currentPath = path.join(parents, pathKey);
currentObject[pathKey].self = currentPath;
generatePaths(currentObject[pathKey], currentPath);
}
});
return currentObject as ReadonlyServerPath;
};
const SERVER_PATHS = generatePaths(EMPTY_SERVER_PATHS);
export default SERVER_PATHS;
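To illustrate what generatePaths produces (assuming the process runs with the usual filesystem root of '/'), every nested key gains a self property holding its absolute path:

import SERVER_PATHS from './lib/consts/SERVER_PATHS';

SERVER_PATHS.usr.sbin['anvil-access-module'].self; // '/usr/sbin/anvil-access-module'
SERVER_PATHS.mnt.shared.incoming.self;             // '/mnt/shared/incoming'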

@@ -1,3 +1,3 @@
 const SERVER_PORT = process.env.SERVER_PORT ?? 8080;
-module.exports = SERVER_PORT;
+export default SERVER_PORT;

@@ -0,0 +1,13 @@
import { LOCAL } from './consts/LOCAL';
import { getLocalHostUUID } from './accessModule';
export const toHostUUID = (
hostUUID: string,
localHostUUID: string = getLocalHostUUID(),
) => (hostUUID === LOCAL ? localHostUUID : hostUUID);
export const toLocal = (
hostUUID: string,
localHostUUID: string = getLocalHostUUID(),
) => (hostUUID === localHostUUID ? LOCAL : hostUUID);

@@ -0,0 +1,2 @@
export const getShortHostName = (hostName: string) =>
hostName.replace(/[.].*$/, '');

@@ -0,0 +1,27 @@
import call from './call';
const join: JoinFunction = (
elements,
{ beforeReturn, elementWrapper = '', onEach, separator = '' } = {},
) => {
const joinSeparator = `${elementWrapper}${separator}${elementWrapper}`;
const toReturn =
elements instanceof Array && elements.length > 0
? `${elementWrapper}${elements.slice(1).reduce<string>(
(previous, element) =>
`${previous}${joinSeparator}${call<string>(onEach, {
notCallableReturn: element,
parameters: [element],
})}`,
elements[0],
)}${elementWrapper}`
: undefined;
return call<string | undefined>(beforeReturn, {
notCallableReturn: toReturn,
parameters: [toReturn],
});
};
export default join;
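A quick sketch of join with hypothetical inputs; the wrapper and separator combine so the result can drop straight into a SQL IN (...) list:

import join from './lib/join';

join(['a', 'b', 'c'], { elementWrapper: "'", separator: ', ' });
// "'a', 'b', 'c'"

join([], { beforeReturn: (result) => result ?? 'IS NULL' });
// 'IS NULL' (an empty array yields undefined, which beforeReturn replaces)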

@@ -0,0 +1,5 @@
export const match = (
value: string,
regexp: string | RegExp,
{ fallbackResult = [] }: { fallbackResult?: string[] } = {},
) => value.match(regexp) ?? fallbackResult;

@@ -0,0 +1,323 @@
import NODE_AND_DR_RESERVED_MEMORY_SIZE from '../../consts/NODE_AND_DR_RESERVED_MEMORY_SIZE';
import { OS_LIST } from '../../consts/OS_LIST';
import join from '../../join';
const buildQueryAnvilDetail = ({
anvilUUIDs = ['*'],
isForProvisionServer,
}: {
anvilUUIDs?: string[] | '*';
isForProvisionServer?: boolean;
}) => {
const condAnvilsUUID = ['all', '*'].includes(anvilUUIDs[0])
? ''
: join(anvilUUIDs, {
beforeReturn: (toReturn) =>
toReturn ? `WHERE anv.anvil_uuid IN (${toReturn})` : '',
elementWrapper: "'",
separator: ', ',
});
console.log(`condAnvilsUUID=[${condAnvilsUUID}]`);
const buildHostQuery = ({
isSummary = false,
}: { isSummary?: boolean } = {}) => {
let fieldsToSelect = `
host_uuid,
host_name,
scan_hardware_cpu_cores,
scan_hardware_ram_total`;
let groupByPhrase = '';
if (isSummary) {
fieldsToSelect = `
MIN(scan_hardware_cpu_cores) AS anvil_total_cpu_cores,
MIN(scan_hardware_ram_total) AS anvil_total_memory`;
groupByPhrase = 'GROUP BY anvil_uuid';
}
return `
SELECT
anvil_uuid,
${fieldsToSelect}
FROM anvils AS anv
JOIN hosts AS hos
ON host_uuid IN (
anvil_node1_host_uuid,
anvil_node2_host_uuid,
anvil_dr1_host_uuid
)
JOIN scan_hardware AS sca_har
ON host_uuid = scan_hardware_host_uuid
${groupByPhrase}`;
};
const buildServerQuery = ({
isSummary = false,
}: { isSummary?: boolean } = {}) => {
let fieldsToSelect = `
server_uuid,
server_name,
server_cpu_cores,
server_memory`;
let groupByPhrase = '';
if (isSummary) {
fieldsToSelect = `
SUM(server_cpu_cores) AS anvil_total_allocated_cpu_cores,
SUM(server_memory) AS anvil_total_allocated_memory`;
groupByPhrase = 'GROUP BY server_anvil_uuid';
}
return `
SELECT
server_anvil_uuid,
${fieldsToSelect}
FROM servers AS ser
JOIN (
SELECT
server_definition_server_uuid,
server_cpu_cores,
CASE server_memory_unit
WHEN 'KiB' THEN server_memory_value * 1024
ELSE server_memory_value
END AS server_memory
FROM (
SELECT
server_definition_server_uuid,
CAST(
SUBSTRING(
server_definition_xml, '%cores=''#"[0-9]+#"''%', '#'
) AS INTEGER
) AS server_cpu_cores,
CAST(
SUBSTRING(
server_definition_xml, '%memory%>#"[0-9]+#"</memory%', '#'
) AS BIGINT
) AS server_memory_value,
SUBSTRING(
server_definition_xml, '%memory%unit=''#"[A-Za-z]+#"''%', '#'
) AS server_memory_unit
FROM server_definitions AS ser_def
) AS ser_def_memory_converted
) AS pos_ser_def
ON server_uuid = server_definition_server_uuid
${groupByPhrase}`;
};
const buildStorageGroupQuery = () => `
SELECT
storage_group_anvil_uuid,
storage_group_uuid,
storage_group_name,
MIN(scan_lvm_vg_size) AS storage_group_size,
MIN(scan_lvm_vg_free) AS storage_group_free
FROM storage_groups AS sto_gro
JOIN storage_group_members AS sto_gro_mem
ON storage_group_uuid = storage_group_member_storage_group_uuid
JOIN scan_lvm_vgs AS sca_lvm_vgs
ON storage_group_member_vg_uuid = scan_lvm_vg_internal_uuid
GROUP BY
storage_group_anvil_uuid,
storage_group_uuid,
storage_group_name`;
const buildFileQuery = () => `
SELECT
file_location_anvil_uuid,
file_uuid,
file_name
FROM file_locations as fil_loc
JOIN files as fil
ON file_location_file_uuid = file_uuid
WHERE
file_type = 'iso'
AND file_location_active = 't'`;
const buildQueryForProvisionServer = () => `
SELECT
anv.anvil_uuid,
anv.anvil_name,
anv.anvil_description,
host_list.host_uuid,
host_list.host_name,
host_list.scan_hardware_cpu_cores,
host_list.scan_hardware_ram_total,
host_summary.anvil_total_cpu_cores,
host_summary.anvil_total_memory,
server_list.server_uuid,
server_list.server_name,
server_list.server_cpu_cores,
server_list.server_memory,
server_summary.anvil_total_allocated_cpu_cores,
server_summary.anvil_total_allocated_memory,
(host_summary.anvil_total_cpu_cores
- server_summary.anvil_total_allocated_cpu_cores
) AS anvil_total_available_cpu_cores,
(host_summary.anvil_total_memory
- server_summary.anvil_total_allocated_memory
- ${NODE_AND_DR_RESERVED_MEMORY_SIZE}
) AS anvil_total_available_memory,
storage_group_list.storage_group_uuid,
storage_group_list.storage_group_name,
storage_group_list.storage_group_size,
storage_group_list.storage_group_free,
file_list.file_uuid,
file_list.file_name
FROM anvils AS anv
JOIN (${buildHostQuery()}) AS host_list
ON anv.anvil_uuid = host_list.anvil_uuid
JOIN (${buildHostQuery({ isSummary: true })}) AS host_summary
ON anv.anvil_uuid = host_summary.anvil_uuid
LEFT JOIN (${buildServerQuery()}) AS server_list
ON anv.anvil_uuid = server_list.server_anvil_uuid
LEFT JOIN (${buildServerQuery({ isSummary: true })}) AS server_summary
ON anv.anvil_uuid = server_summary.server_anvil_uuid
LEFT JOIN (${buildStorageGroupQuery()}) AS storage_group_list
ON anv.anvil_uuid = storage_group_list.storage_group_anvil_uuid
LEFT JOIN (${buildFileQuery()}) AS file_list
ON anv.anvil_uuid = file_list.file_location_anvil_uuid
;`;
let query = `
SELECT
*
FROM anvils AS anv
${condAnvilsUUID}
;`;
let afterQueryReturn = undefined;
if (isForProvisionServer) {
query = buildQueryForProvisionServer();
afterQueryReturn = (queryStdout: unknown) => {
let results = queryStdout;
if (queryStdout instanceof Array) {
let rowStage: AnvilDetailForProvisionServer | undefined;
const anvils = queryStdout.reduce<AnvilDetailForProvisionServer[]>(
(
reducedRows,
[
anvilUUID,
anvilName,
anvilDescription,
hostUUID,
hostName,
hostCPUCores,
hostMemory,
anvilTotalCPUCores,
anvilTotalMemory,
serverUUID,
serverName,
serverCPUCores,
serverMemory,
anvilTotalAllocatedCPUCores,
anvilTotalAllocatedMemory,
anvilTotalAvailableCPUCores,
anvilTotalAvailableMemory,
storageGroupUUID,
storageGroupName,
storageGroupSize,
storageGroupFree,
fileUUID,
fileName,
],
) => {
if (!rowStage || anvilUUID !== rowStage.anvilUUID) {
rowStage = {
anvilUUID,
anvilName,
anvilDescription,
anvilTotalCPUCores: parseInt(anvilTotalCPUCores),
anvilTotalMemory: String(anvilTotalMemory),
anvilTotalAllocatedCPUCores: parseInt(
anvilTotalAllocatedCPUCores,
),
anvilTotalAllocatedMemory: String(anvilTotalAllocatedMemory),
anvilTotalAvailableCPUCores: parseInt(
anvilTotalAvailableCPUCores,
),
anvilTotalAvailableMemory: String(anvilTotalAvailableMemory),
hosts: [],
servers: [],
storageGroups: [],
files: [],
};
reducedRows.push(rowStage);
}
if (
!rowStage.hosts.find(({ hostUUID: added }) => added === hostUUID)
) {
rowStage.hosts.push({
hostUUID,
hostName,
hostCPUCores: parseInt(hostCPUCores),
hostMemory: String(hostMemory),
});
}
if (
!rowStage.servers.find(
({ serverUUID: added }) => added === serverUUID,
)
) {
rowStage.servers.push({
serverUUID,
serverName,
serverCPUCores: parseInt(serverCPUCores),
serverMemory: String(serverMemory),
});
}
if (
!rowStage.storageGroups.find(
({ storageGroupUUID: added }) => added === storageGroupUUID,
)
) {
rowStage.storageGroups.push({
storageGroupUUID,
storageGroupName,
storageGroupSize: String(storageGroupSize),
storageGroupFree: String(storageGroupFree),
});
}
if (
!rowStage.files.find(({ fileUUID: added }) => added === fileUUID)
) {
rowStage.files.push({
fileUUID,
fileName,
});
}
return reducedRows;
},
[],
);
results = {
anvils,
osList: OS_LIST,
};
}
return results;
};
}
return {
query,
afterQueryReturn,
};
};
export default buildQueryAnvilDetail;

@@ -0,0 +1,81 @@
import { RequestHandler } from 'express';
import buildGetRequestHandler from '../buildGetRequestHandler';
import buildQueryAnvilDetail from './buildQueryAnvilDetail';
import { sanitize } from '../../sanitize';
const getAnvil: RequestHandler = buildGetRequestHandler(
(request, buildQueryOptions) => {
const { anvilUUIDs, isForProvisionServer } = request.query;
let query = `
SELECT
anv.anvil_name,
anv.anvil_uuid,
hos.host_name,
hos.host_uuid
FROM anvils AS anv
JOIN hosts AS hos
ON hos.host_uuid IN (
anv.anvil_node1_host_uuid,
anv.anvil_node2_host_uuid,
anv.anvil_dr1_host_uuid
)
ORDER BY anv.anvil_uuid;`;
if (buildQueryOptions) {
buildQueryOptions.afterQueryReturn = (queryStdout) => {
let results = queryStdout;
if (queryStdout instanceof Array) {
let rowStage: AnvilOverview | undefined;
results = queryStdout.reduce<AnvilOverview[]>(
(reducedRows, [anvilName, anvilUUID, hostName, hostUUID]) => {
if (!rowStage || anvilUUID !== rowStage.anvilUUID) {
{
rowStage = {
anvilName,
anvilUUID,
hosts: [],
};
reducedRows.push(rowStage);
}
}
rowStage.hosts.push({ hostName, hostUUID });
return reducedRows;
},
[],
);
}
return results;
};
}
if (anvilUUIDs) {
const {
query: anvilDetailQuery,
afterQueryReturn: anvilDetailAfterQueryReturn,
} = buildQueryAnvilDetail({
anvilUUIDs: sanitize(anvilUUIDs, 'string[]', {
modifierType: 'sql',
}),
isForProvisionServer: sanitize(isForProvisionServer, 'boolean'),
});
query = anvilDetailQuery;
if (buildQueryOptions) {
buildQueryOptions.afterQueryReturn = anvilDetailAfterQueryReturn;
}
}
return query;
},
);
export default getAnvil;

@@ -0,0 +1,34 @@
import { RequestHandler } from 'express';
import { sanitize } from '../sanitize';
import { stderr, stdout } from '../shell';
export const buildBranchRequestHandler: (map: {
[handler: string]: RequestHandler | undefined;
}) => RequestHandler =
(map) =>
(...args) => {
const [
{
query: { handler: rawHandler },
},
response,
] = args;
const handlerKey = sanitize(rawHandler, 'string');
stdout(`Create host handler: ${handlerKey}`);
// Ensure each handler sends a response at the end of any branch.
const handler = map[handlerKey];
if (handler) {
handler(...args);
} else {
stderr(`Handler is not registered; got [${handlerKey}]`);
response.status(400).send();
return;
}
};

@@ -0,0 +1,64 @@
import { Request, Response } from 'express';
import { dbQuery } from '../accessModule';
import call from '../call';
const buildGetRequestHandler =
(
query: string | BuildQueryFunction,
{ beforeRespond }: BuildGetRequestHandlerOptions = {},
) =>
(request: Request, response: Response) => {
console.log('Calling CLI script to get data.');
const buildQueryOptions: BuildQueryOptions = {};
let queryStdout;
try {
({ stdout: queryStdout } = dbQuery(
call<string>(query, {
parameters: [request, buildQueryOptions],
notCallableReturn: query,
}),
));
} catch (queryError) {
console.log(`Failed to execute query; CAUSE: ${queryError}`);
response.status(500).send();
return;
}
console.log(
`Query stdout pre-hooks (type=[${typeof queryStdout}]): ${JSON.stringify(
queryStdout,
null,
2,
)}`,
);
const { afterQueryReturn } = buildQueryOptions;
queryStdout = call(afterQueryReturn, {
parameters: [queryStdout],
notCallableReturn: queryStdout,
});
queryStdout = call(beforeRespond, {
parameters: [queryStdout],
notCallableReturn: queryStdout,
});
console.log(
`Query stdout post-hooks (type=[${typeof queryStdout}]): ${JSON.stringify(
queryStdout,
null,
2,
)}`,
);
response.json(queryStdout);
};
export default buildGetRequestHandler;

@@ -0,0 +1,53 @@
import { RequestHandler } from 'express';
import SERVER_PATHS from '../../consts/SERVER_PATHS';
import { job } from '../../accessModule';
import { stderr } from '../../shell';
type DistinctDBJobParams = Omit<
DBJobParams,
'file' | 'line' | 'job_data' | 'job_progress'
>;
const MANAGE_HOST_POWER_JOB_PARAMS: {
poweroff: DistinctDBJobParams;
reboot: DistinctDBJobParams;
} = {
poweroff: {
job_command: `${SERVER_PATHS.usr.sbin['anvil-manage-power'].self} --poweroff -y`,
job_name: 'poweroff::system',
job_title: 'job_0010',
job_description: 'job_0008',
},
reboot: {
job_command: `${SERVER_PATHS.usr.sbin['anvil-manage-power'].self} --reboot -y`,
job_name: 'reboot::system',
job_title: 'job_0009',
job_description: 'job_0006',
},
};
export const buildHostPowerHandler: (
task?: 'poweroff' | 'reboot',
) => RequestHandler =
(task = 'reboot') =>
(request, response) => {
const subParams: DBJobParams = {
file: __filename,
...MANAGE_HOST_POWER_JOB_PARAMS[task],
};
try {
job(subParams);
} catch (subError) {
stderr(`Failed to ${task} host; CAUSE: ${subError}`);
response.status(500).send();
return;
}
response.status(204).send();
};

@@ -0,0 +1,88 @@
import { RequestHandler } from 'express';
import { HOST_KEY_CHANGED_PREFIX } from '../../consts/HOST_KEY_CHANGED_PREFIX';
import { dbQuery, getLocalHostUUID, getPeerData } from '../../accessModule';
import { sanitizeSQLParam } from '../../sanitizeSQLParam';
import { stderr } from '../../shell';
export const getHostSSH: RequestHandler<
unknown,
{
badSSHKeys?: DeleteSSHKeyConflictRequestBody;
hostName: string;
hostOS: string;
hostUUID: string;
isConnected: boolean;
isInetConnected: boolean;
isOSRegistered: boolean;
},
{
password: string;
port?: number;
ipAddress: string;
}
> = (request, response) => {
const {
body: { password, port = 22, ipAddress: target },
} = request;
let hostName: string;
let hostOS: string;
let hostUUID: string;
let isConnected: boolean;
let isInetConnected: boolean;
let isOSRegistered: boolean;
const localHostUUID = getLocalHostUUID();
try {
({
hostName,
hostOS,
hostUUID,
isConnected,
isInetConnected,
isOSRegistered,
} = getPeerData(target, { password, port }));
} catch (subError) {
stderr(`Failed to get peer data; CAUSE: ${subError}`);
response.status(500).send();
return;
}
let badSSHKeys: DeleteSSHKeyConflictRequestBody | undefined;
if (!isConnected) {
const rows = dbQuery(`
SELECT sta.state_note, sta.state_uuid
FROM states AS sta
WHERE sta.state_host_uuid = '${localHostUUID}'
AND sta.state_name = '${HOST_KEY_CHANGED_PREFIX}${sanitizeSQLParam(
target,
)}';`).stdout as [stateNote: string, stateUUID: string][];
if (rows.length > 0) {
badSSHKeys = rows.reduce<DeleteSSHKeyConflictRequestBody>(
(previous, [, stateUUID]) => {
previous[localHostUUID].push(stateUUID);
return previous;
},
{ [localHostUUID]: [] },
);
}
}
response.status(200).send({
badSSHKeys,
hostName,
hostOS,
hostUUID,
isConnected,
isInetConnected,
isOSRegistered,
});
};

@@ -0,0 +1,4 @@
export * from './getHostSSH';
export * from './poweroffHost';
export * from './rebootHost';
export * from './updateSystem';

@@ -0,0 +1,3 @@
import { buildHostPowerHandler } from './buildHostPowerHandler';
export const poweroffHost = buildHostPowerHandler('poweroff');

@@ -0,0 +1,3 @@
import { buildHostPowerHandler } from './buildHostPowerHandler';
export const rebootHost = buildHostPowerHandler('reboot');

@@ -0,0 +1,26 @@
import { RequestHandler } from 'express';
import SERVER_PATHS from '../../consts/SERVER_PATHS';
import { job } from '../../accessModule';
import { stderr } from '../../shell';
export const updateSystem: RequestHandler = (request, response) => {
try {
job({
file: __filename,
job_command: SERVER_PATHS.usr.sbin['anvil-update-system'].self,
job_description: 'job_0004',
job_name: 'update::system',
job_title: 'job_0003',
});
} catch (subError) {
stderr(`Failed to initiate system update; CAUSE: ${subError}`);
response.status(500).send();
return;
}
response.status(204).send();
};

@@ -0,0 +1,40 @@
import join from '../../join';
const buildQueryFileDetail = ({
fileUUIDs = ['*'],
}: {
fileUUIDs?: string[] | '*';
}) => {
const condFileUUIDs = ['all', '*'].includes(fileUUIDs[0])
? ''
: join(fileUUIDs, {
beforeReturn: (toReturn) =>
toReturn ? `AND fil.file_uuid IN (${toReturn})` : '',
elementWrapper: "'",
separator: ', ',
});
console.log(`condFilesUUID=[${condFileUUIDs}]`);
return `
SELECT
fil.file_uuid,
fil.file_name,
fil.file_size,
fil.file_type,
fil.file_md5sum,
fil_loc.file_location_uuid,
fil_loc.file_location_active,
anv.anvil_uuid,
anv.anvil_name,
anv.anvil_description
FROM files AS fil
JOIN file_locations AS fil_loc
ON fil.file_uuid = fil_loc.file_location_file_uuid
JOIN anvils AS anv
ON fil_loc.file_location_anvil_uuid = anv.anvil_uuid
WHERE fil.file_type != 'DELETED'
${condFileUUIDs};`;
};
export default buildQueryFileDetail;

@@ -0,0 +1,31 @@
import { RequestHandler } from 'express';
import buildGetRequestHandler from '../buildGetRequestHandler';
import buildQueryFileDetail from './buildQueryFileDetail';
import { sanitize } from '../../sanitize';
const getFile: RequestHandler = buildGetRequestHandler((request) => {
const { fileUUIDs } = request.query;
let query = `
SELECT
file_uuid,
file_name,
file_size,
file_type,
file_md5sum
FROM files
WHERE file_type != 'DELETED';`;
if (fileUUIDs) {
query = buildQueryFileDetail({
fileUUIDs: sanitize(fileUUIDs, 'string[]', {
modifierType: 'sql',
}),
});
}
return query;
});
export default getFile;

@@ -0,0 +1,12 @@
import { RequestHandler } from 'express';
import buildGetRequestHandler from '../buildGetRequestHandler';
import buildQueryFileDetail from './buildQueryFileDetail';
import { sanitizeSQLParam } from '../../sanitizeSQLParam';
const getFileDetail: RequestHandler = buildGetRequestHandler(
({ params: { fileUUID } }) =>
buildQueryFileDetail({ fileUUIDs: [sanitizeSQLParam(fileUUID)] }),
);
export default getFileDetail;

@@ -0,0 +1,68 @@
import { buildKnownIDCondition } from '../../buildCondition';
import { buildQueryResultModifier } from '../../buildQueryResultModifier';
import { cap } from '../../cap';
import { getShortHostName } from '../../getShortHostName';
import { stdout } from '../../shell';
type ExtractVariableKeyFunction = (parts: string[]) => string;
const MAP_TO_EXTRACTOR: { [prefix: string]: ExtractVariableKeyFunction } = {
form: ([, part2]) => {
const [head, ...rest] = part2.split('_');
return rest.reduce<string>(
(previous, part) => `${previous}${cap(part)}`,
head,
);
},
'install-target': () => 'installTarget',
};
export const buildQueryHostDetail: BuildQueryDetailFunction = ({
keys: hostUUIDs = '*',
} = {}) => {
const condHostUUIDs = buildKnownIDCondition(hostUUIDs, 'AND hos.host_uuid');
stdout(`condHostUUIDs=[${condHostUUIDs}]`);
const query = `
SELECT
hos.host_name,
hos.host_uuid,
var.variable_name,
var.variable_value
FROM variables AS var
JOIN hosts AS hos
ON var.variable_source_uuid = hos.host_uuid
WHERE (
variable_name LIKE 'form::config_%'
OR variable_name = 'install-target::enabled'
)
${condHostUUIDs};`;
const afterQueryReturn: QueryResultModifierFunction =
buildQueryResultModifier((output) => {
const [hostName, hostUUID] = output[0];
const shortHostName = getShortHostName(hostName);
return output.reduce<
{ hostName: string; hostUUID: string; shortHostName: string } & Record<
string,
string
>
>(
(previous, [, , variableName, variableValue]) => {
const [variablePrefix, ...restVariableParts] =
variableName.split('::');
const key = MAP_TO_EXTRACTOR[variablePrefix](restVariableParts);
previous[key] = variableValue;
return previous;
},
{ hostName, hostUUID, shortHostName },
);
});
return { query, afterQueryReturn };
};

@@ -0,0 +1,164 @@
import assert from 'assert';
import { RequestHandler } from 'express';
import {
REP_DOMAIN,
REP_INTEGER,
REP_IPV4,
REP_IPV4_CSV,
} from '../../consts/REG_EXP_PATTERNS';
import SERVER_PATHS from '../../consts/SERVER_PATHS';
import { job } from '../../accessModule';
const fvar = (configStepCount: number, fieldName: string) =>
['form', `config_step${configStepCount}`, fieldName, 'value'].join('::');
const buildNetworkLinks = (
configStepCount: number,
networkShortName: string,
interfaces: InitializeStrikerNetworkForm['interfaces'],
) =>
interfaces.reduce<string>((reduceContainer, iface, index) => {
let result = reduceContainer;
if (iface) {
const { networkInterfaceMACAddress } = iface;
result += `
${fvar(
configStepCount,
`${networkShortName}_link${index + 1}_mac_to_set`,
)}=${networkInterfaceMACAddress}`;
}
return result;
}, '');
export const configStriker: RequestHandler<
unknown,
undefined,
InitializeStrikerForm
> = ({ body }, response) => {
console.log('Begin initialize Striker.');
console.dir(body, { depth: null });
const {
adminPassword = '',
domainName = '',
hostName = '',
hostNumber = 0,
networkDNS = '',
networkGateway = '',
networks = [],
organizationName = '',
organizationPrefix = '',
} = body || {};
const dataAdminPassword = String(adminPassword);
const dataDomainName = String(domainName);
const dataHostName = String(hostName);
const dataHostNumber = String(hostNumber);
const dataNetworkDNS = String(networkDNS);
const dataNetworkGateway = String(networkGateway);
const dataOrganizationName = String(organizationName);
const dataOrganizationPrefix = String(organizationPrefix);
try {
assert(
!/['"/\\><}{]/g.test(dataAdminPassword),
`Data admin password cannot contain single-quote, double-quote, slash, backslash, angle brackets, and curly brackets; got [${dataAdminPassword}]`,
);
assert(
REP_DOMAIN.test(dataDomainName),
`Data domain name can only contain alphanumeric, hyphen, and dot characters; got [${dataDomainName}]`,
);
assert(
REP_DOMAIN.test(dataHostName),
`Data host name can only contain alphanumeric, hyphen, and dot characters; got [${dataHostName}]`,
);
assert(
REP_INTEGER.test(dataHostNumber) && hostNumber > 0,
`Data host number can only contain digits; got [${dataHostNumber}]`,
);
assert(
REP_IPV4_CSV.test(dataNetworkDNS),
`Data network DNS must be a comma separated list of valid IPv4 addresses; got [${dataNetworkDNS}]`,
);
assert(
REP_IPV4.test(dataNetworkGateway),
`Data network gateway must be a valid IPv4 address; got [${dataNetworkGateway}]`,
);
assert(
dataOrganizationName.length > 0,
`Data organization name cannot be empty; got [${dataOrganizationName}]`,
);
assert(
/^[a-z0-9]{1,5}$/.test(dataOrganizationPrefix),
`Data organization prefix can only contain 1 to 5 lowercase alphanumeric characters; got [${dataOrganizationPrefix}]`,
);
} catch (assertError) {
console.log(
`Failed to assert value when trying to initialize striker; CAUSE: ${assertError}.`,
);
response.status(400).send();
return;
}
try {
job({
file: __filename,
job_command: SERVER_PATHS.usr.sbin['anvil-configure-host'].self,
job_data: `${fvar(1, 'domain')}=${domainName}
${fvar(1, 'organization')}=${organizationName}
${fvar(1, 'prefix')}=${organizationPrefix}
${fvar(1, 'sequence')}=${hostNumber}
${fvar(2, 'dns')}=${networkDNS}
${fvar(2, 'gateway')}=${networkGateway}
${fvar(2, 'host_name')}=${hostName}
${fvar(2, 'striker_password')}=${adminPassword}
${fvar(2, 'striker_user')}=admin${
networks.reduce<{
counters: Record<InitializeStrikerNetworkForm['type'], number>;
result: string;
}>(
(reduceContainer, { interfaces, ipAddress, subnetMask, type }) => {
const { counters } = reduceContainer;
counters[type] = counters[type] ? counters[type] + 1 : 1;
const networkShortName = `${type}${counters[type]}`;
reduceContainer.result += `
${fvar(2, `${networkShortName}_ip`)}=${ipAddress}
${fvar(2, `${networkShortName}_subnet_mask`)}=${subnetMask}
${buildNetworkLinks(2, networkShortName, interfaces)}`;
return reduceContainer;
},
{ counters: {}, result: '' },
).result
}`,
job_name: 'configure::network',
job_title: 'job_0001',
job_description: 'job_0071',
});
} catch (subError) {
console.log(`Failed to queue striker initialization; CAUSE: ${subError}`);
response.status(500).send();
return;
}
response.status(200).send();
};

@@ -0,0 +1,8 @@
import { RequestHandler } from 'express';
import { buildBranchRequestHandler } from '../buildBranchRequestHandler';
import { configStriker } from './configStriker';
export const createHost: RequestHandler = buildBranchRequestHandler({
striker: configStriker,
});

@@ -0,0 +1,190 @@
import { RequestHandler } from 'express';
import SERVER_PATHS from '../../consts/SERVER_PATHS';
import {
getAnvilData,
getLocalHostUUID,
getPeerData,
job,
sub,
} from '../../accessModule';
import { sanitize } from '../../sanitize';
import { rm, stderr, stdoutVar } from '../../shell';
export const createHostConnection: RequestHandler<
unknown,
undefined,
CreateHostConnectionRequestBody
> = (request, response) => {
const {
body: {
dbName = 'anvil',
ipAddress,
isPing = false,
password,
port = 5432,
sshPort = 22,
user = 'admin',
},
} = request;
const commonDBName = sanitize(dbName, 'string');
const commonIsPing = sanitize(isPing, 'boolean');
const commonPassword = sanitize(password, 'string');
const commonDBPort = sanitize(port, 'number');
const commonDBUser = sanitize(user, 'string');
const peerIPAddress = sanitize(ipAddress, 'string');
const peerSSHPort = sanitize(sshPort, 'number');
const commonPing = commonIsPing ? 1 : 0;
let localDBPort: number;
let localIPAddress: string;
let isPeerReachable = false;
let isPeerDBReachable = false;
let peerHostUUID: string;
try {
({ hostUUID: peerHostUUID, isConnected: isPeerReachable } = getPeerData(
peerIPAddress,
{ password: commonPassword, port: peerSSHPort },
));
} catch (subError) {
stderr(`Failed to get peer data; CAUSE: ${subError}`);
response.status(500).send();
return;
}
stdoutVar({ peerHostUUID, isPeerReachable });
if (!isPeerReachable) {
stderr(
`Cannot connect to peer; please verify credentials and SSH keys validity.`,
);
response.status(400).send();
return;
}
try {
localIPAddress = sub('find_matching_ip', {
subModuleName: 'System',
subParams: { host: peerIPAddress },
}).stdout;
} catch (subError) {
stderr(`Failed to get matching IP address; CAUSE: ${subError}`);
response.status(500).send();
return;
}
stdoutVar({ localIPAddress });
const pgpassFilePath = '/tmp/.pgpass';
const pgpassFileBody = `${peerIPAddress}:${commonDBPort}:${commonDBName}:${commonDBUser}:${commonPassword.replace(
/:/g,
'\\:',
)}`;
stdoutVar({ pgpassFilePath, pgpassFileBody });
try {
sub('write_file', {
subModuleName: 'Storage',
subParams: {
body: pgpassFileBody,
file: pgpassFilePath,
mode: '0600',
overwrite: 1,
secure: 1,
},
});
} catch (subError) {
stderr(`Failed to write ${pgpassFilePath}; CAUSE: ${subError}`);
response.status(500).send();
return;
}
try {
const [rawIsPeerDBReachable] = sub('call', {
subModuleName: 'System',
subParams: {
shell_call: `PGPASSFILE="${pgpassFilePath}" ${SERVER_PATHS.usr.bin.psql.self} --host ${peerIPAddress} --port ${commonDBPort} --dbname ${commonDBName} --username ${commonDBUser} --no-password --tuples-only --no-align --command "SELECT 1"`,
},
}).stdout as [output: string, returnCode: number];
isPeerDBReachable = rawIsPeerDBReachable === '1';
} catch (subError) {
stderr(`Failed to test connection to peer database; CAUSE: ${subError}`);
}
try {
rm(pgpassFilePath);
} catch (fsError) {
stderr(`Failed to remove ${pgpassFilePath}; CAUSE: ${fsError}`);
response.status(500).send();
return;
}
stdoutVar({ isPeerDBReachable });
if (!isPeerDBReachable) {
stderr(
`Cannot connect to peer database; please verify database credentials.`,
);
response.status(400).send();
return;
}
const localHostUUID = getLocalHostUUID();
try {
const {
database: {
[localHostUUID]: { port: rawLocalDBPort },
},
} = getAnvilData({ database: true }) as { database: DatabaseHash };
localDBPort = sanitize(rawLocalDBPort, 'number');
} catch (subError) {
stderr(`Failed to get local database data from hash; CAUSE: ${subError}`);
response.status(500).send();
return;
}
const jobCommand = `${SERVER_PATHS.usr.sbin['striker-manage-peers'].self} --add --host-uuid ${peerHostUUID} --host ${peerIPAddress} --port ${commonDBPort} --ping ${commonPing}`;
const peerJobCommand = `${SERVER_PATHS.usr.sbin['striker-manage-peers'].self} --add --host-uuid ${localHostUUID} --host ${localIPAddress} --port ${localDBPort} --ping ${commonPing}`;
try {
job({
file: __filename,
job_command: jobCommand,
job_data: `password=${commonPassword}
peer_job_command=${peerJobCommand}`,
job_description: 'job_0012',
job_name: 'striker-peer::add',
job_title: 'job_0011',
});
} catch (subError) {
stderr(`Failed to add peer ${peerHostUUID}; CAUSE: ${subError}`);
response.status(500).send();
return;
}
response.status(201).send();
};

@@ -0,0 +1,42 @@
import { RequestHandler } from 'express';
import SERVER_PATHS from '../../consts/SERVER_PATHS';
import { job } from '../../accessModule';
import { toHostUUID } from '../../convertHostUUID';
import { stderr } from '../../shell';
export const deleteHostConnection: RequestHandler<
unknown,
undefined,
DeleteHostConnectionRequestBody
> = (request, response) => {
const { body } = request;
const hostUUIDs = Object.keys(body);
hostUUIDs.forEach((key) => {
const hostUUID = toHostUUID(key);
const peerHostUUIDs = body[key];
peerHostUUIDs.forEach((peerHostUUID) => {
try {
job({
file: __filename,
job_command: `${SERVER_PATHS.usr.sbin['striker-manage-peers'].self} --remove --host-uuid ${peerHostUUID}`,
job_description: 'job_0014',
job_host_uuid: hostUUID,
job_name: 'striker-peer::delete',
job_title: 'job_0013',
});
} catch (subError) {
stderr(`Failed to delete peer ${peerHostUUID}; CAUSE: ${subError}`);
response.status(500).send();
return;
}
});
});
response.status(204).send();
};

@@ -0,0 +1,48 @@
import { getLocalHostUUID } from '../../accessModule';
import buildGetRequestHandler from '../buildGetRequestHandler';
import { buildQueryHostDetail } from './buildQueryHostDetail';
import { buildQueryResultReducer } from '../../buildQueryResultModifier';
import { toLocal } from '../../convertHostUUID';
import { getShortHostName } from '../../getShortHostName';
import { sanitize } from '../../sanitize';
export const getHost = buildGetRequestHandler((request, buildQueryOptions) => {
const { hostUUIDs } = request.query;
const localHostUUID: string = getLocalHostUUID();
let query = `
SELECT
hos.host_name,
hos.host_uuid
FROM hosts AS hos;`;
let afterQueryReturn: QueryResultModifierFunction | undefined =
buildQueryResultReducer<{ [hostUUID: string]: HostOverview }>(
(previous, [hostName, hostUUID]) => {
const key = toLocal(hostUUID, localHostUUID);
previous[key] = {
hostName,
hostUUID,
shortHostName: getShortHostName(hostName),
};
return previous;
},
{},
);
if (hostUUIDs) {
({ query, afterQueryReturn } = buildQueryHostDetail({
keys: sanitize(hostUUIDs, 'string[]', {
modifierType: 'sql',
}),
}));
}
if (buildQueryOptions) {
buildQueryOptions.afterQueryReturn = afterQueryReturn;
}
return query;
});
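
Without a hostUUIDs filter, the handler above returns the overview built by the reducer; with ?hostUUIDs=... it swaps in buildQueryHostDetail. A sketch of the overview shape, assuming toLocal keys the local host as 'local'; all values are placeholders.

```ts
// Illustrative sketch of the GET /host overview response; values are
// placeholders and the 'local' key is an assumption about toLocal.
const exampleHostOverview: Record<
  string,
  { hostName: string; hostUUID: string; shortHostName: string }
> = {
  local: {
    hostName: 'an-striker01.example.com',
    hostUUID: 'f0f1f2f3-0000-4000-8000-000000000001',
    shortHostName: 'an-striker01',
  },
};
```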

@ -0,0 +1,128 @@
import { getAnvilData, getLocalHostUUID } from '../../accessModule';
import { buildUnknownIDCondition } from '../../buildCondition';
import buildGetRequestHandler from '../buildGetRequestHandler';
import { toLocal } from '../../convertHostUUID';
import { match } from '../../match';
import { stdout } from '../../shell';
const buildHostConnections = (
fromHostUUID: string,
databaseHash: DatabaseHash,
{
defaultPort = 5432,
defaultUser = 'admin',
}: { defaultPort?: number; defaultUser?: string } = {},
) =>
Object.entries(databaseHash).reduce<HostConnectionOverview>(
(previous, [hostUUID, { host: ipAddress, ping, port: rawPort, user }]) => {
const port = parseInt(rawPort);
if (hostUUID === fromHostUUID) {
previous.inbound.port = port;
previous.inbound.user = user;
} else {
previous.peer[ipAddress] = {
hostUUID,
ipAddress,
isPing: ping === '1',
port,
user,
};
}
return previous;
},
{
inbound: { ipAddress: {}, port: defaultPort, user: defaultUser },
peer: {},
},
);
export const getHostConnection = buildGetRequestHandler(
(request, buildQueryOptions) => {
const { hostUUIDs: rawHostUUIDs } = request.query;
let rawDatabaseData: DatabaseHash;
const hostUUIDField = 'ip_add.ip_address_host_uuid';
const localHostUUID: string = getLocalHostUUID();
const { after: condHostUUIDs, before: beforeBuildIDCond } =
buildUnknownIDCondition(rawHostUUIDs, hostUUIDField, {
onFallback: () => `${hostUUIDField} = '${localHostUUID}'`,
});
const hostUUIDs =
beforeBuildIDCond.length > 0 ? beforeBuildIDCond : [localHostUUID];
const getConnectionKey = (hostUUID: string) =>
toLocal(hostUUID, localHostUUID);
stdout(`condHostUUIDs=[${condHostUUIDs}]`);
try {
({ database: rawDatabaseData } = getAnvilData({ database: true }));
} catch (subError) {
throw new Error(`Failed to get anvil data; CAUSE: ${subError}`);
}
const connections = hostUUIDs.reduce<{
[hostUUID: string]: HostConnectionOverview;
}>((previous, hostUUID) => {
const connectionKey = getConnectionKey(hostUUID);
previous[connectionKey] = buildHostConnections(hostUUID, rawDatabaseData);
return previous;
}, {});
stdout(`connections=[${JSON.stringify(connections, null, 2)}]`);
if (buildQueryOptions) {
buildQueryOptions.afterQueryReturn = (queryStdout) => {
let result = queryStdout;
if (queryStdout instanceof Array) {
queryStdout.forEach(
([ipAddressUUID, hostUUID, ipAddress, network]) => {
const [, networkType, rawNetworkNumber, rawNetworkLinkNumber] =
match(network, /^([^\s]+)(\d+)_[^\s]+(\d+)$/);
const connectionKey = getConnectionKey(hostUUID);
connections[connectionKey].inbound.ipAddress[ipAddress] = {
hostUUID,
ipAddress,
ipAddressUUID,
networkLinkNumber: parseInt(rawNetworkLinkNumber),
networkNumber: parseInt(rawNetworkNumber),
networkType,
};
},
);
result = connections;
}
return result;
};
}
return `SELECT
ip_add.ip_address_uuid,
ip_add.ip_address_host_uuid,
ip_add.ip_address_address,
CASE
WHEN ip_add.ip_address_on_type = 'interface'
THEN net_int.network_interface_name
ELSE bon.bond_active_interface
END AS network_name
FROM ip_addresses AS ip_add
LEFT JOIN network_interfaces AS net_int
ON ip_add.ip_address_on_uuid = net_int.network_interface_uuid
LEFT JOIN bridges AS bri
ON ip_add.ip_address_on_uuid = bri.bridge_uuid
LEFT JOIN bonds AS bon
ON bri.bridge_uuid = bon.bond_bridge_uuid
OR ip_add.ip_address_on_uuid = bon.bond_uuid
WHERE ${condHostUUIDs}
AND ip_add.ip_address_note != 'DELETED';`;
},
);
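
For one queried host, the structure assembled above (buildHostConnections plus the afterQueryReturn) looks roughly like the sketch below, assuming network names of the form bcn1_link1 so the regular expression splits them into type, number, and link number. All values are placeholders.

```ts
// Illustrative sketch of a single GET /host/connection entry; all values are
// placeholders.
const exampleHostConnection = {
  inbound: {
    port: 5432,
    user: 'admin',
    ipAddress: {
      '10.201.4.1': {
        hostUUID: 'f0f1f2f3-0000-4000-8000-000000000001',
        ipAddress: '10.201.4.1',
        ipAddressUUID: 'f0f1f2f3-0000-4000-8000-00000000000a',
        networkLinkNumber: 1,
        networkNumber: 1,
        networkType: 'bcn',
      },
    },
  },
  peer: {
    '10.201.4.2': {
      hostUUID: 'f0f1f2f3-0000-4000-8000-000000000002',
      ipAddress: '10.201.4.2',
      isPing: false,
      port: 5432,
      user: 'admin',
    },
  },
};
```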

@ -0,0 +1,19 @@
import buildGetRequestHandler from '../buildGetRequestHandler';
import { buildQueryHostDetail } from './buildQueryHostDetail';
import { toHostUUID } from '../../convertHostUUID';
import { sanitizeSQLParam } from '../../sanitizeSQLParam';
export const getHostDetail = buildGetRequestHandler(
({ params: { hostUUID: rawHostUUID } }, buildQueryOptions) => {
const hostUUID = toHostUUID(rawHostUUID);
const { afterQueryReturn, query } = buildQueryHostDetail({
keys: [sanitizeSQLParam(hostUUID)],
});
if (buildQueryOptions) {
buildQueryOptions.afterQueryReturn = afterQueryReturn;
}
return query;
},
);

@ -0,0 +1,8 @@
export * from './createHost';
export * from './createHostConnection';
export * from './deleteHostConnection';
export * from './getHost';
export * from './getHostConnection';
export * from './getHostDetail';
export * from './prepareHost';
export * from './updateHost';

@ -0,0 +1,150 @@
import assert from 'assert';
import { RequestHandler } from 'express';
import {
REP_DOMAIN,
REP_IPV4,
REP_PEACEFUL_STRING,
REP_UUID,
} from '../../consts/REG_EXP_PATTERNS';
import SERVER_PATHS from '../../consts/SERVER_PATHS';
import { job, variable } from '../../accessModule';
import { sanitize } from '../../sanitize';
import { stderr } from '../../shell';
export const prepareHost: RequestHandler<
unknown,
undefined,
PrepareHostRequestBody
> = (request, response) => {
const {
body: {
enterpriseUUID,
hostIPAddress,
hostName,
hostPassword,
hostSSHPort,
hostType,
hostUser = 'root',
hostUUID,
redhatPassword,
redhatUser,
} = {},
} = request;
const isEnterpriseUUIDProvided = Boolean(enterpriseUUID);
const isHostUUIDProvided = Boolean(hostUUID);
const isRedhatAccountProvided =
Boolean(redhatPassword) || Boolean(redhatUser);
const dataEnterpriseUUID = sanitize(enterpriseUUID, 'string');
const dataHostIPAddress = sanitize(hostIPAddress, 'string');
const dataHostName = sanitize(hostName, 'string');
const dataHostPassword = sanitize(hostPassword, 'string');
const dataHostSSHPort = sanitize(hostSSHPort, 'number') || 22;
const dataHostType = sanitize(hostType, 'string');
// Host user is unused at the moment.
const dataHostUser = sanitize(hostUser, 'string');
const dataHostUUID = sanitize(hostUUID, 'string');
const dataRedhatPassword = sanitize(redhatPassword, 'string');
const dataRedhatUser = sanitize(redhatUser, 'string');
try {
assert(
REP_IPV4.test(dataHostIPAddress),
`Data host IP address must be a valid IPv4 address; got [${dataHostIPAddress}]`,
);
assert(
REP_DOMAIN.test(dataHostName),
`Data host name can only contain alphanumeric, hyphen, and dot characters; got [${dataHostName}]`,
);
assert(
REP_PEACEFUL_STRING.test(dataHostPassword),
`Data host password must be a peaceful string; got [${dataHostPassword}]`,
);
assert(
/^(node|dr)$/.test(dataHostType),
`Data host type must be one of "node" or "dr"; got [${dataHostType}]`,
);
assert(
REP_PEACEFUL_STRING.test(dataHostUser),
`Data host user must be a peaceful string; got [${dataHostUser}]`,
);
if (isEnterpriseUUIDProvided) {
assert(
REP_UUID.test(dataEnterpriseUUID),
`Data enterprise UUID must be a valid UUIDv4; got [${dataEnterpriseUUID}]`,
);
}
if (isHostUUIDProvided) {
assert(
REP_UUID.test(dataHostUUID),
`Data host UUID must be a valid UUIDv4; got [${dataHostUUID}]`,
);
}
if (isRedhatAccountProvided) {
assert(
REP_PEACEFUL_STRING.test(dataRedhatPassword),
`Data redhat password must be a peaceful string; got [${dataRedhatPassword}]`,
);
assert(
REP_PEACEFUL_STRING.test(dataRedhatUser),
`Data redhat user must be a peaceful string; got [${dataRedhatUser}]`,
);
}
} catch (assertError) {
stderr(
`Failed to assert value when trying to prepare host; CAUSE: ${assertError}`,
);
response.status(400).send();
return;
}
try {
if (isHostUUIDProvided) {
variable({
file: __filename,
update_value_only: 1,
variable_name: 'system::configured',
variable_source_table: 'hosts',
variable_source_uuid: dataHostUUID,
variable_value: 0,
});
}
job({
file: __filename,
job_command: SERVER_PATHS.usr.sbin['striker-initialize-host'].self,
job_data: `enterprise_uuid=${dataEnterpriseUUID}
host_ip_address=${dataHostIPAddress}
host_name=${dataHostName}
password=${dataHostPassword}
rh_password=${dataRedhatPassword}
rh_user=${dataRedhatUser}
ssh_port=${dataHostSSHPort}
type=${dataHostType}`,
job_description: 'job_0022',
job_name: `initialize::${dataHostType}::${dataHostIPAddress}`,
job_title: `job_002${dataHostType === 'dr' ? '1' : '0'}`,
});
} catch (subError) {
stderr(`Failed to init host; CAUSE: ${subError}`);
response.status(500).send();
return;
}
response.status(200).send();
};
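
A request body that passes the assertions in prepareHost, for reference; every value is a placeholder and the field list mirrors the destructuring at the top of the handler.

```ts
// Illustrative sketch; values are placeholders. redhatUser/redhatPassword
// must be supplied together, and hostUUID (when given) must be a valid UUID
// of an existing host to be re-initialized.
const examplePrepareHostBody = {
  hostIPAddress: '10.201.4.10', // REP_IPV4
  hostName: 'an-a01n01.example.com', // REP_DOMAIN
  hostPassword: 'super secret', // REP_PEACEFUL_STRING
  hostType: 'node', // 'node' or 'dr'
  hostSSHPort: 22, // optional; defaults to 22
};
```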

@ -0,0 +1,39 @@
import { RequestHandler } from 'express';
import { LOCAL } from '../../consts/LOCAL';
import SERVER_PATHS from '../../consts/SERVER_PATHS';
import { job } from '../../accessModule';
import { stderr, stdout } from '../../shell';
export const setHostInstallTarget: RequestHandler = (request, response) => {
stdout(
`Begin set host install target.\n${JSON.stringify(request.body, null, 2)}`,
);
const { isEnableInstallTarget } =
request.body as SetHostInstallTargetRequestBody;
const { hostUUID: rawHostUUID } = request.params as UpdateHostParams;
const hostUUID: string | undefined =
rawHostUUID === LOCAL ? undefined : rawHostUUID;
const task = isEnableInstallTarget ? 'enable' : 'disable';
try {
job({
file: __filename,
job_command: `${SERVER_PATHS.usr.sbin['striker-manage-install-target'].self} --${task}`,
job_description: 'job_0016',
job_host_uuid: hostUUID,
job_name: `install-target::${task}`,
job_title: 'job_0015',
});
} catch (subError) {
stderr(`Failed to ${task} install target; CAUSE: ${subError}`);
response.status(500).send();
return;
}
response.status(200).send();
};

@ -0,0 +1,8 @@
import { RequestHandler } from 'express';
import { buildBranchRequestHandler } from '../buildBranchRequestHandler';
import { setHostInstallTarget } from './setHostInstallTarget';
export const updateHost: RequestHandler = buildBranchRequestHandler({
'install-target': setHostInstallTarget,
});

@ -0,0 +1,82 @@
import buildGetRequestHandler from '../buildGetRequestHandler';
import { sanitize } from '../../sanitize';
import { date, stdout } from '../../shell';
export const getJob = buildGetRequestHandler((request, buildQueryOptions) => {
const { start: rawStart } = request.query;
const start = sanitize(rawStart, 'number');
let condModifiedDate = '';
try {
const minDate = date('--date', `@${start}`, '--rfc-3339', 'ns');
condModifiedDate = `OR (job.job_progress = 100 AND job.modified_date >= '${minDate}')`;
} catch (shellError) {
throw new Error(
`Failed to build date condition for job query; CAUSE: ${shellError}`,
);
}
stdout(`condModifiedDate=[${condModifiedDate}]`);
if (buildQueryOptions) {
buildQueryOptions.afterQueryReturn = (queryStdout) => {
let result = queryStdout;
if (queryStdout instanceof Array) {
result = queryStdout.reduce<{
[jobUUID: string]: {
jobCommand: string;
jobHostName: string;
jobHostUUID: string;
jobName: string;
jobProgress: number;
jobUUID: string;
};
}>(
(
previous,
[
jobUUID,
jobName,
jobHostUUID,
jobHostName,
jobCommand,
rawJobProgress,
],
) => {
previous[jobUUID] = {
jobCommand,
jobHostName,
jobHostUUID,
jobName,
jobProgress: parseFloat(rawJobProgress),
jobUUID,
};
return previous;
},
{},
);
}
return result;
};
}
return `
SELECT
job.job_uuid,
job.job_name,
job.job_host_uuid,
hos.host_name,
job.job_command,
job.job_progress
FROM jobs AS job
JOIN hosts AS hos
ON job.job_host_uuid = hos.host_uuid
WHERE job.job_progress < 100
${condModifiedDate};`;
});
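
The start query parameter is an epoch timestamp in seconds: jobs still in progress are always returned, and completed jobs (progress 100) are included when modified at or after start. A sketch of a call, with the origin and /api prefix assumed:

```ts
// Illustrative sketch; origin and '/api' prefix are assumptions.
const start = Math.floor(Date.now() / 1000) - 300; // completed within 5 minutes

fetch(`http://localhost:8080/api/job?start=${start}`)
  .then((res) => res.json())
  .then((jobs) => console.log(jobs)); // keyed by job UUID
```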

@ -0,0 +1,54 @@
import { getLocalHostUUID } from '../../accessModule';
import buildGetRequestHandler from '../buildGetRequestHandler';
export const getNetworkInterface = buildGetRequestHandler(
(request, buildQueryOptions) => {
const localHostUUID: string = getLocalHostUUID();
if (buildQueryOptions) {
buildQueryOptions.afterQueryReturn = (queryStdout) => {
let result = queryStdout;
if (queryStdout instanceof Array) {
result = queryStdout.map<NetworkInterfaceOverview>(
([
networkInterfaceUUID,
networkInterfaceMACAddress,
networkInterfaceName,
networkInterfaceState,
networkInterfaceSpeed,
networkInterfaceOrder,
]) => ({
networkInterfaceUUID,
networkInterfaceMACAddress,
networkInterfaceName,
networkInterfaceState,
networkInterfaceSpeed,
networkInterfaceOrder,
}),
);
}
return result;
};
}
return `
SELECT
network_interface_uuid,
network_interface_mac_address,
network_interface_name,
CASE
WHEN network_interface_link_state = '1'
AND network_interface_operational = 'up'
THEN 'up'
ELSE 'down'
END AS network_interface_state,
network_interface_speed,
ROW_NUMBER() OVER(ORDER BY modified_date ASC) AS network_interface_order
FROM network_interfaces
WHERE network_interface_operational != 'DELETE'
AND network_interface_host_uuid = '${localHostUUID}';`;
},
);

@ -0,0 +1 @@
export { getNetworkInterface } from './getNetworkInterface';

@ -0,0 +1,158 @@
import assert from 'assert';
import { RequestHandler } from 'express';
import { OS_LIST_MAP } from '../../consts/OS_LIST';
import { REP_INTEGER, REP_UUID } from '../../consts/REG_EXP_PATTERNS';
import SERVER_PATHS from '../../consts/SERVER_PATHS';
import { dbQuery, job } from '../../accessModule';
import { stderr, stdout } from '../../shell';
export const createServer: RequestHandler = ({ body }, response) => {
stdout(`Creating server.\n${JSON.stringify(body, null, 2)}`);
const {
serverName,
cpuCores,
memory,
virtualDisks: [
{ storageSize = undefined, storageGroupUUID = undefined } = {},
] = [],
installISOFileUUID,
driverISOFileUUID,
anvilUUID,
optimizeForOS,
} = body || {};
const dataServerName = String(serverName);
const dataOS = String(optimizeForOS);
const dataCPUCores = String(cpuCores);
const dataRAM = String(memory);
const dataStorageGroupUUID = String(storageGroupUUID);
const dataStorageSize = String(storageSize);
const dataInstallISO = String(installISOFileUUID);
const dataDriverISO = driverISOFileUUID ? String(driverISOFileUUID) : 'none';
const dataAnvilUUID = String(anvilUUID);
try {
assert(
/^[0-9a-z_-]+$/i.test(dataServerName),
`Data server name can only contain alphanumeric, underscore, and hyphen characters; got [${dataServerName}].`,
);
const [[serverNameCount]] = dbQuery(
`SELECT COUNT(server_uuid) FROM servers WHERE server_name = '${dataServerName}'`,
).stdout;
assert(
serverNameCount === 0,
`Data server name already exists; got [${dataServerName}]`,
);
assert(
OS_LIST_MAP[dataOS] !== undefined,
`Data OS not recognized; got [${dataOS}].`,
);
assert(
REP_INTEGER.test(dataCPUCores),
`Data CPU cores can only contain digits; got [${dataCPUCores}].`,
);
assert(
REP_INTEGER.test(dataRAM),
`Data RAM can only contain digits; got [${dataRAM}].`,
);
assert(
REP_UUID.test(dataStorageGroupUUID),
`Data storage group UUID must be a valid UUID; got [${dataStorageGroupUUID}].`,
);
assert(
REP_INTEGER.test(dataStorageSize),
`Data storage size can only contain digits; got [${dataStorageSize}].`,
);
assert(
REP_UUID.test(dataInstallISO),
`Data install ISO must be a valid UUID; got [${dataInstallISO}].`,
);
assert(
dataDriverISO === 'none' || REP_UUID.test(dataDriverISO),
`Data driver ISO must be a valid UUID when provided; got [${dataDriverISO}].`,
);
assert(
REP_UUID.test(dataAnvilUUID),
`Data anvil UUID must be a valid UUID; got [${dataAnvilUUID}].`,
);
} catch (assertError) {
stdout(
`Failed to assert value when trying to provision a server; CAUSE: ${assertError}.`,
);
response.status(400).send();
return;
}
const provisionServerJobData = `server_name=${dataServerName}
os=${dataOS}
cpu_cores=${dataCPUCores}
ram=${dataRAM}
storage_group_uuid=${dataStorageGroupUUID}
storage_size=${dataStorageSize}
install_iso=${dataInstallISO}
driver_iso=${dataDriverISO}`;
stdout(`provisionServerJobData=[${provisionServerJobData}]`);
const [[provisionServerJobHostUUID]] = dbQuery(
`SELECT
CASE
WHEN pri_hos.primary_host_uuid IS NULL
THEN nod_1.node1_host_uuid
ELSE pri_hos.primary_host_uuid
END AS host_uuid
FROM (
SELECT
1 AS phl,
sca_clu_nod.scan_cluster_node_host_uuid AS primary_host_uuid
FROM anvils AS anv
JOIN scan_cluster_nodes AS sca_clu_nod
ON sca_clu_nod.scan_cluster_node_host_uuid = anv.anvil_node1_host_uuid
OR sca_clu_nod.scan_cluster_node_host_uuid = anv.anvil_node2_host_uuid
WHERE sca_clu_nod.scan_cluster_node_in_ccm
AND sca_clu_nod.scan_cluster_node_crmd_member
AND sca_clu_nod.scan_cluster_node_cluster_member
AND (NOT sca_clu_nod.scan_cluster_node_maintenance_mode)
AND anv.anvil_uuid = '${dataAnvilUUID}'
ORDER BY sca_clu_nod.scan_cluster_node_name
LIMIT 1
) AS pri_hos
RIGHT JOIN (
SELECT
1 AS phr,
anv.anvil_node1_host_uuid AS node1_host_uuid
FROM anvils AS anv
WHERE anv.anvil_uuid = '${dataAnvilUUID}'
) AS nod_1
ON pri_hos.phl = nod_1.phr;`,
).stdout;
stdout(`provisionServerJobHostUUID=[${provisionServerJobHostUUID}]`);
try {
job({
file: __filename,
job_command: SERVER_PATHS.usr.sbin['anvil-provision-server'].self,
job_data: provisionServerJobData,
job_name: 'server:provision',
job_title: 'job_0147',
job_description: 'job_0148',
job_host_uuid: provisionServerJobHostUUID,
});
} catch (subError) {
stderr(`Failed to provision server; CAUSE: ${subError}`);
response.status(500).send();
return;
}
response.status(202).send();
};
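
For reference, a POST /server body that satisfies the assertions above. Placeholders throughout; memory and storage sizes are assumed to be byte counts, and 'rhel8' is assumed to be a key in OS_LIST_MAP.

```ts
// Illustrative sketch; UUIDs, sizes, and the OS key are placeholders or
// assumptions.
const exampleCreateServerBody = {
  serverName: 'srv01-test',
  cpuCores: 2,
  memory: '2147483648', // assumed to be bytes (2 GiB)
  virtualDisks: [
    {
      storageSize: '107374182400', // assumed to be bytes (100 GiB)
      storageGroupUUID: 'f0f1f2f3-0000-4000-8000-000000000010',
    },
  ],
  installISOFileUUID: 'f0f1f2f3-0000-4000-8000-000000000011',
  driverISOFileUUID: 'f0f1f2f3-0000-4000-8000-000000000012', // optional; 'none' is also accepted
  anvilUUID: 'f0f1f2f3-0000-4000-8000-000000000013',
  optimizeForOS: 'rhel8', // assumed to be a key in OS_LIST_MAP
};
```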

@ -0,0 +1,60 @@
import buildGetRequestHandler from '../buildGetRequestHandler';
import join from '../../join';
import { sanitize } from '../../sanitize';
export const getServer = buildGetRequestHandler(
(request, buildQueryOptions) => {
const { anvilUUIDs } = request.query;
const condAnvilUUIDs = join(sanitize(anvilUUIDs, 'string[]'), {
beforeReturn: (toReturn) =>
toReturn ? `AND ser.server_anvil_uuid IN (${toReturn})` : '',
elementWrapper: "'",
separator: ', ',
});
console.log(`condAnvilUUIDs=[${condAnvilUUIDs}]`);
if (buildQueryOptions) {
buildQueryOptions.afterQueryReturn = (queryStdout) => {
let result = queryStdout;
if (queryStdout instanceof Array) {
result = queryStdout.map<ServerOverview>(
([
serverUUID,
serverName,
serverState,
serverHostUUID,
anvilUUID,
anvilName,
]) => ({
serverHostUUID,
serverName,
serverState,
serverUUID,
anvilUUID,
anvilName,
}),
);
}
return result;
};
}
return `
SELECT
ser.server_uuid,
ser.server_name,
ser.server_state,
ser.server_host_uuid,
anv.anvil_uuid,
anv.anvil_name
FROM servers AS ser
JOIN anvils AS anv
ON ser.server_anvil_uuid = anv.anvil_uuid
WHERE ser.server_state != 'DELETED'
${condAnvilUUIDs};`;
},
);

@ -0,0 +1,152 @@
import assert from 'assert';
import { RequestHandler } from 'express';
import { createReadStream } from 'fs';
import path from 'path';
import { REP_UUID } from '../../consts/REG_EXP_PATTERNS';
import SERVER_PATHS from '../../consts/SERVER_PATHS';
import { dbQuery, getLocalHostUUID, job } from '../../accessModule';
import { sanitize } from '../../sanitize';
import { mkfifo, rm } from '../../shell';
export const getServerDetail: RequestHandler = (request, response) => {
const { serverUUID } = request.params;
const { ss, resize } = request.query;
const epoch = Date.now();
const isScreenshot = sanitize(ss, 'boolean');
console.log(
`serverUUID=[${serverUUID}],epoch=[${epoch}],isScreenshot=[${isScreenshot}]`,
);
try {
assert(
REP_UUID.test(serverUUID),
`Server UUID must be a valid UUID; got [${serverUUID}]`,
);
} catch (assertError) {
console.log(
`Failed to assert value when trying to get server detail; CAUSE: ${assertError}.`,
);
response.status(400).send();
return;
}
if (isScreenshot) {
let requestHostUUID: string, serverHostUUID: string;
try {
requestHostUUID = getLocalHostUUID();
} catch (subError) {
console.log(subError);
response.status(500).send();
return;
}
console.log(`requestHostUUID=[${requestHostUUID}]`);
try {
[[serverHostUUID]] = dbQuery(`
SELECT server_host_uuid
FROM servers
WHERE server_uuid = '${serverUUID}';`).stdout;
} catch (queryError) {
console.log(`Failed to get server host UUID; CAUSE: ${queryError}`);
response.status(500).send();
return;
}
console.log(`serverHostUUID=[${serverHostUUID}]`);
const imageFileName = `${serverUUID}_screenshot_${epoch}`;
const imageFilePath = path.join(SERVER_PATHS.tmp.self, imageFileName);
try {
mkfifo(imageFilePath);
const namedPipeReadStream = createReadStream(imageFilePath, {
autoClose: true,
encoding: 'utf-8',
});
let imageData = '';
namedPipeReadStream.once('close', () => {
console.log(`On close; removing named pipe at ${imageFilePath}.`);
try {
rm(imageFilePath);
} catch (cleanPipeError) {
console.log(
`Failed to clean up named pipe; CAUSE: ${cleanPipeError}`,
);
}
});
namedPipeReadStream.once('end', () => {
response.status(200).send({ screenshot: imageData });
});
namedPipeReadStream.on('data', (data) => {
const imageChunk = data.toString().trim();
const chunkLogLength = 10;
console.log(
`${serverUUID} image chunk: ${imageChunk.substring(
0,
chunkLogLength,
)}...${imageChunk.substring(imageChunk.length - chunkLogLength - 1)}`,
);
imageData += imageChunk;
});
} catch (prepPipeError) {
console.log(`Failed to prepare named pipe; CAUSE: ${prepPipeError}`);
response.status(500).send();
return;
}
let resizeArgs = sanitize(resize, 'string');
if (!/^\d+x\d+$/.test(resizeArgs)) {
resizeArgs = '';
}
try {
job({
file: __filename,
job_command: SERVER_PATHS.usr.sbin['anvil-get-server-screenshot'].self,
job_data: `server-uuid=${serverUUID}
request-host-uuid=${requestHostUUID}
resize=${resizeArgs}
out-file-id=${epoch}`,
job_name: `get_server_screenshot::${serverUUID}::${epoch}`,
job_title: 'job_0356',
job_description: 'job_0357',
job_host_uuid: serverHostUUID,
});
} catch (subError) {
console.log(
`Failed to queue fetch server screenshot job; CAUSE: ${subError}`,
);
response.status(500).send();
return;
}
} else {
// For getting server detail data.
response.status(200).send();
}
};
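
With ss set, the handler queues anvil-get-server-screenshot on the server's host and streams the result back through a named pipe; resize must match WIDTHxHEIGHT or it is dropped. A sketch of the call, with origin and /api prefix assumed:

```ts
// Illustrative sketch; the server UUID, origin, and '/api' prefix are
// placeholders or assumptions.
const serverUUID = 'f0f1f2f3-0000-4000-8000-000000000020';

fetch(`http://localhost:8080/api/server/${serverUUID}?ss=1&resize=500x500`)
  .then((res) => res.json())
  .then(({ screenshot }) =>
    console.log(`received ${screenshot.length} characters of screenshot data`),
  );
```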

@ -0,0 +1,3 @@
export { createServer } from './createServer';
export { getServer } from './getServer';
export { getServerDetail } from './getServerDetail';

@ -0,0 +1,41 @@
import { RequestHandler } from 'express';
import SERVER_PATHS from '../../consts/SERVER_PATHS';
import { job } from '../../accessModule';
import { toHostUUID } from '../../convertHostUUID';
import { stderr } from '../../shell';
export const deleteSSHKeyConflict: RequestHandler<
unknown,
undefined,
DeleteSSHKeyConflictRequestBody
> = (request, response) => {
const { body } = request;
const hostUUIDs = Object.keys(body);
hostUUIDs.forEach((key) => {
const hostUUID = toHostUUID(key);
const stateUUIDs = body[key];
try {
job({
file: __filename,
job_command: SERVER_PATHS.usr.sbin['anvil-manage-keys'].self,
job_data: stateUUIDs.join(','),
job_description: 'job_0057',
job_host_uuid: hostUUID,
job_name: 'manage::broken_keys',
job_title: 'job_0056',
});
} catch (subError) {
stderr(`Failed to delete bad SSH keys; CAUSE: ${subError}`);
response.status(500).send();
return;
}
});
response.status(204).send();
};

@ -0,0 +1,66 @@
import { HOST_KEY_CHANGED_PREFIX } from '../../consts/HOST_KEY_CHANGED_PREFIX';
import { getLocalHostUUID } from '../../accessModule';
import buildGetRequestHandler from '../buildGetRequestHandler';
import { buildQueryResultReducer } from '../../buildQueryResultModifier';
import { toLocal } from '../../convertHostUUID';
import { match } from '../../match';
export const getSSHKeyConflict = buildGetRequestHandler(
(request, buildQueryOptions) => {
const localHostUUID: string = getLocalHostUUID();
const query = `
SELECT
hos.host_name,
hos.host_uuid,
sta.state_name,
sta.state_note,
sta.state_uuid
FROM states AS sta
JOIN hosts AS hos
ON sta.state_host_uuid = hos.host_uuid
WHERE sta.state_name LIKE '${HOST_KEY_CHANGED_PREFIX}%';`;
const afterQueryReturn = buildQueryResultReducer<{
[hostUUID: string]: {
[stateUUID: string]: {
badFile: string;
badLine: number;
hostName: string;
hostUUID: string;
ipAddress: string;
stateUUID: string;
};
};
}>((previous, [hostName, hostUUID, stateName, stateNote, stateUUID]) => {
const hostUUIDKey = toLocal(hostUUID, localHostUUID);
if (previous[hostUUIDKey] === undefined) {
previous[hostUUIDKey] = {};
}
const ipAddress = stateName.slice(HOST_KEY_CHANGED_PREFIX.length);
const [, badFile, badLine = '0'] = match(
stateNote,
/file=([^\s]+),line=(\d+)/,
);
previous[hostUUIDKey][stateUUID] = {
badFile,
badLine: parseInt(badLine),
hostName,
hostUUID,
ipAddress,
stateUUID,
};
return previous;
}, {});
if (buildQueryOptions) {
buildQueryOptions.afterQueryReturn = afterQueryReturn;
}
return query;
},
);
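
The reducer above derives the conflicting IP from the state name (after HOST_KEY_CHANGED_PREFIX) and the offending known_hosts entry from the state note. A standalone sketch of the note parsing, using the native String.match in place of the repo's match() helper and an assumed note format:

```ts
// Illustrative sketch; the state_note value is an assumed example that fits
// the regular expression used above.
const stateNote = 'file=/root/.ssh/known_hosts,line=3';
const [, badFile, badLine = '0'] =
  stateNote.match(/file=([^\s]+),line=(\d+)/) ?? [];

console.log(badFile, parseInt(badLine)); // /root/.ssh/known_hosts 3
```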

@ -0,0 +1,2 @@
export * from './deleteSSHKeyConflict';
export * from './getSSHKeyConflict';

@ -0,0 +1,27 @@
import buildGetRequestHandler from '../buildGetRequestHandler';
import { buildQueryResultReducer } from '../../buildQueryResultModifier';
export const getUser = buildGetRequestHandler((request, buildQueryOptions) => {
const query = `
SELECT
use.user_name,
use.user_uuid
FROM users AS use;`;
const afterQueryReturn: QueryResultModifierFunction | undefined =
buildQueryResultReducer<
Record<string, { userName: string; userUUID: string }>
>((previous, [userName, userUUID]) => {
previous[userUUID] = {
userName,
userUUID,
};
return previous;
}, {});
if (buildQueryOptions) {
buildQueryOptions.afterQueryReturn = afterQueryReturn;
}
return query;
});

@ -0,0 +1,71 @@
import call from './call';
import { sanitizeSQLParam } from './sanitizeSQLParam';
type MapToReturnType = {
boolean: boolean;
number: number;
string: string;
'string[]': string[];
};
type MapToReturnFunction = {
[ReturnTypeName in keyof MapToReturnType]: (
value: unknown,
modifier: (unmodified: unknown) => string,
) => MapToReturnType[ReturnTypeName];
};
type ModifierFunction = (unmodified: string) => string;
type MapToModifierFunction = {
none: undefined;
sql: ModifierFunction;
};
const MAP_TO_MODIFIER_FUNCTION: MapToModifierFunction = {
none: undefined,
sql: sanitizeSQLParam,
};
const MAP_TO_RETURN_FUNCTION: MapToReturnFunction = {
boolean: (value) => value !== undefined,
number: (value) => parseFloat(String(value)) || 0,
string: (value, mod) => (value ? mod(value) : ''),
'string[]': (value, mod) => {
let result: string[] = [];
if (value instanceof Array) {
result = value.reduce<string[]>((reduceContainer, element) => {
if (element) {
reduceContainer.push(mod(element));
}
return reduceContainer;
}, []);
} else if (value) {
result = mod(value).split(/[,;]/);
}
return result;
},
};
export const sanitize = <ReturnTypeName extends keyof MapToReturnType>(
value: unknown,
returnType: ReturnTypeName,
{
modifierType = 'none',
modifier = MAP_TO_MODIFIER_FUNCTION[modifierType],
}: {
modifier?: ModifierFunction;
modifierType?: keyof MapToModifierFunction;
} = {},
): MapToReturnType[ReturnTypeName] =>
MAP_TO_RETURN_FUNCTION[returnType](value, (unmodified: unknown) => {
const input = String(unmodified);
return call<string>(modifier, {
notCallableReturn: input,
parameters: [input],
});
}) as MapToReturnType[ReturnTypeName];
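
For reference, the mapping functions above imply the following results (a sketch assuming it lives alongside src/lib so the relative import resolves):

```ts
// Illustrative sketch; expected results follow from the mapping functions in
// sanitize.ts.
import { sanitize } from './sanitize';

sanitize(undefined, 'boolean'); // false
sanitize('1', 'boolean'); // true (any defined value)
sanitize('123abc', 'number'); // 123
sanitize('abc', 'number'); // 0 (parseFloat fallback)
sanitize("it's", 'string', { modifierType: 'sql' }); // "its"
sanitize('a,b;c', 'string[]'); // ['a', 'b', 'c']
sanitize(['a', '', 'b'], 'string[]'); // ['a', 'b'] (empty strings dropped)
```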

@ -0,0 +1,2 @@
export const sanitizeSQLParam = (variable: string): string =>
variable.replace(/[']/g, '');

@ -0,0 +1,46 @@
import { spawnSync } from 'child_process';
import SERVER_PATHS from './consts/SERVER_PATHS';
const print = (
message: string,
{
eol = '\n',
stream = 'stdout',
}: { eol?: string; stream?: 'stderr' | 'stdout' } = {},
) => process[stream].write(`${message}${eol}`);
const systemCall = (
...[command, args = [], options = {}]: Parameters<typeof spawnSync>
) => {
const { error, stderr, stdout } = spawnSync(command, args, {
...options,
encoding: 'utf-8',
});
if (error) {
throw error;
}
if (stderr) {
throw new Error(stderr);
}
return stdout;
};
export const date = (...args: string[]) =>
systemCall(SERVER_PATHS.usr.bin.date.self, args);
export const mkfifo = (...args: string[]) =>
systemCall(SERVER_PATHS.usr.bin.mkfifo.self, args);
export const rm = (...args: string[]) =>
systemCall(SERVER_PATHS.usr.bin.rm.self, args);
export const stderr = (message: string) => print(message, { stream: 'stderr' });
export const stdout = (message: string) => print(message);
export const stdoutVar = (variable: { [name: string]: unknown }) =>
print(`Variables: ${JSON.stringify(variable, null, 2)}`);
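
These wrappers shell out synchronously and throw when the spawn fails or anything is written to stderr. A usage sketch, mirroring the date invocation getJob relies on; the import path assumes the example sits alongside src/lib.

```ts
// Illustrative sketch; assumes the relative import resolves from src/lib.
import { date, stdout, stdoutVar } from './shell';

const minDate = date('--date', '@1669334400', '--rfc-3339', 'ns');
stdout(`minDate=[${minDate.trim()}]`);
stdoutVar({ minDate }); // prints: Variables: { "minDate": "..." }
```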

@ -1,6 +1,6 @@
-const multer = require('multer');
-const SERVER_PATHS = require('../lib/consts/SERVER_PATHS');
+import multer from 'multer';
+import SERVER_PATHS from '../lib/consts/SERVER_PATHS';
 const storage = multer.diskStorage({
   destination: (request, file, callback) => {
@ -13,4 +13,4 @@ const storage = multer.diskStorage({
 const uploadSharedFiles = multer({ storage });
-module.exports = uploadSharedFiles;
+export default uploadSharedFiles;

@ -0,0 +1,9 @@
import express from 'express';
import getAnvil from '../lib/request_handlers/anvil/getAnvil';
const router = express.Router();
router.get('/', getAnvil);
export default router;

@ -0,0 +1,18 @@
import express from 'express';
import {
getHostSSH,
poweroffHost,
rebootHost,
updateSystem,
} from '../lib/request_handlers/command';
const router = express.Router();
router
.put('/inquire-host', getHostSSH)
.put('/poweroff-host', poweroffHost)
.put('/reboot-host', rebootHost)
.put('/update-system', updateSystem);
export default router;

@ -1,4 +1,4 @@
-const express = require('express');
+import express from 'express';
 const router = express.Router();
@ -14,4 +14,4 @@ router
   response.status(200).send({ message });
 });
-module.exports = router;
+export default router;

@ -1,14 +1,14 @@
-const express = require('express');
-const {
+import express from 'express';
+import {
   dbJobAnvilSyncShared,
   dbQuery,
   dbSubRefreshTimestamp,
   dbWrite,
-} = require('../lib/accessDB');
-const getFilesOverview = require('../lib/request_handlers/files/getFilesOverview');
-const getFileDetail = require('../lib/request_handlers/files/getFileDetail');
-const uploadSharedFiles = require('../middlewares/uploadSharedFiles');
+} from '../lib/accessModule';
+import getFile from '../lib/request_handlers/file/getFile';
+import getFileDetail from '../lib/request_handlers/file/getFileDetail';
+import uploadSharedFiles from '../middlewares/uploadSharedFiles';
 const router = express.Router();
@ -37,7 +37,7 @@ router
     response.status(204).send();
   })
-  .get('/', getFilesOverview)
+  .get('/', getFile)
   .get('/:fileUUID', getFileDetail)
   .post('/', uploadSharedFiles.single('file'), ({ file, body }, response) => {
     console.log('Receiving shared file.');
@ -112,28 +112,35 @@ router
     }
     if (fileLocations) {
-      fileLocations.forEach(({ fileLocationUUID, isFileLocationActive }) => {
-        let fileLocationActive = 0;
-        let jobName = 'purge';
-        let jobTitle = '0136';
-        let jobDescription = '0137';
-        if (isFileLocationActive) {
-          fileLocationActive = 1;
-          jobName = 'pull_file';
-          jobTitle = '0132';
-          jobDescription = '0133';
-        }
+      fileLocations.forEach(
+        ({
+          fileLocationUUID,
+          isFileLocationActive,
+        }: {
+          fileLocationUUID: string;
+          isFileLocationActive: boolean;
+        }) => {
+          let fileLocationActive = 0;
+          let jobName = 'purge';
+          let jobTitle = '0136';
+          let jobDescription = '0137';
+          if (isFileLocationActive) {
+            fileLocationActive = 1;
+            jobName = 'pull_file';
+            jobTitle = '0132';
+            jobDescription = '0133';
+          }
         query += `
           UPDATE file_locations
           SET
            file_location_active = '${fileLocationActive}',
            modified_date = '${dbSubRefreshTimestamp()}'
          WHERE file_location_uuid = '${fileLocationUUID}';`;
         const targetHosts = dbQuery(
           `SELECT
             anv.anvil_node1_host_uuid,
             anv.anvil_node2_host_uuid,
             anv.anvil_dr1_host_uuid
@ -141,22 +148,23 @@ router
           JOIN file_locations AS fil_loc
             ON anv.anvil_uuid = fil_loc.file_location_anvil_uuid
           WHERE fil_loc.file_location_uuid = '${fileLocationUUID}';`,
         ).stdout;
-        targetHosts.flat().forEach((hostUUID) => {
+        targetHosts.flat().forEach((hostUUID: string) => {
           if (hostUUID) {
             anvilSyncSharedFunctions.push(() =>
               dbJobAnvilSyncShared(
                 jobName,
                 `file_uuid=${fileUUID}`,
                 jobTitle,
                 jobDescription,
                 { jobHostUUID: hostUUID },
               ),
             );
           }
         });
-      });
+        },
+      );
     }
     console.log(`Query (type=[${typeof query}]): [${query}]`);
@ -166,7 +174,7 @@ router
     try {
       ({ stdout: queryStdout } = dbWrite(query));
     } catch (queryError) {
-      console.log(`Query error: ${queryError}`);
+      console.log(`Failed to execute query; CAUSE: ${queryError}`);
       response.status(500).send();
     }
@ -191,4 +199,4 @@ router
   response.status(200).send(queryStdout);
 });
-module.exports = router;
+export default router;

@ -0,0 +1,28 @@
import express from 'express';
import {
createHost,
createHostConnection,
deleteHostConnection,
getHost,
getHostConnection,
getHostDetail,
prepareHost,
updateHost,
} from '../lib/request_handlers/host';
const CONNECTION_PATH = '/connection';
const router = express.Router();
router
.get('/', getHost)
.get(CONNECTION_PATH, getHostConnection)
.get('/:hostUUID', getHostDetail)
.post('/', createHost)
.post(CONNECTION_PATH, createHostConnection)
.put('/prepare', prepareHost)
.put('/:hostUUID', updateHost)
.delete(CONNECTION_PATH, deleteHostConnection);
export default router;

@ -0,0 +1,27 @@
import { Router } from 'express';
import anvilRouter from './anvil';
import commandRouter from './command';
import echoRouter from './echo';
import fileRouter from './file';
import hostRouter from './host';
import jobRouter from './job';
import networkInterfaceRouter from './network-interface';
import serverRouter from './server';
import sshKeyRouter from './ssh-key';
import userRouter from './user';
const routes: Readonly<Record<string, Router>> = {
anvil: anvilRouter,
command: commandRouter,
echo: echoRouter,
file: fileRouter,
host: hostRouter,
job: jobRouter,
'network-interface': networkInterfaceRouter,
server: serverRouter,
'ssh-key': sshKeyRouter,
user: userRouter,
};
export default routes;
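
How this map is consumed is not shown in this portion of the diff; one plausible wiring (not necessarily what src/app.ts does) would mount each router under the API root:

```ts
// Illustrative sketch only; '/api' stands in for API_ROOT_PATH and 8080 for
// SERVER_PORT, both defined elsewhere in this PR but not shown here.
import express from 'express';
import routes from './routes';

const app = express();
app.use(express.json());

Object.entries(routes).forEach(([path, router]) => {
  app.use(`/api/${path}`, router);
});

app.listen(8080);
```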

@ -0,0 +1,9 @@
import express from 'express';
import { getJob } from '../lib/request_handlers/job';
const router = express.Router();
router.get('/', getJob);
export default router;

@ -0,0 +1,9 @@
import express from 'express';
import { getNetworkInterface } from '../lib/request_handlers/network-interface';
const router = express.Router();
router.get('/', getNetworkInterface);
export default router;

@ -0,0 +1,16 @@
import express from 'express';
import {
createServer,
getServer,
getServerDetail,
} from '../lib/request_handlers/server';
const router = express.Router();
router
.get('/', getServer)
.get('/:serverUUID', getServerDetail)
.post('/', createServer);
export default router;

@ -0,0 +1,14 @@
import express from 'express';
import {
deleteSSHKeyConflict,
getSSHKeyConflict,
} from '../lib/request_handlers/ssh-key';
const router = express.Router();
router
.get('/conflict', getSSHKeyConflict)
.delete('/conflict', deleteSSHKeyConflict);
export default router;

@ -0,0 +1,9 @@
import express from 'express';
import { getUser } from '../lib/request_handlers/user';
const router = express.Router();
router.get('/', getUser);
export default router;

@ -0,0 +1,3 @@
interface AnvilDataStruct {
[key: string]: AnvilDataStruct | boolean;
}

@ -0,0 +1,33 @@
type AnvilDetailForProvisionServer = {
anvilUUID: string;
anvilName: string;
anvilDescription: string;
hosts: Array<{
hostUUID: string;
hostName: string;
hostCPUCores: number;
hostMemory: string;
}>;
anvilTotalCPUCores: number;
anvilTotalMemory: string;
servers: Array<{
serverUUID: string;
serverName: string;
serverCPUCores: number;
serverMemory: string;
}>;
anvilTotalAllocatedCPUCores: number;
anvilTotalAllocatedMemory: string;
anvilTotalAvailableCPUCores: number;
anvilTotalAvailableMemory: string;
storageGroups: Array<{
storageGroupUUID: string;
storageGroupName: string;
storageGroupSize: string;
storageGroupFree: string;
}>;
files: Array<{
fileUUID: string;
fileName: string;
}>;
};

@ -0,0 +1,8 @@
type AnvilOverview = {
anvilName: string;
anvilUUID: string;
hosts: Array<{
hostName: string;
hostUUID: string;
}>;
};

@ -0,0 +1,3 @@
type BuildGetRequestHandlerOptions = {
beforeRespond?: (queryStdout: unknown) => unknown;
};

@ -0,0 +1,9 @@
type BuildQueryDetailOptions = { keys?: string[] | '*' };
type BuildQueryDetailReturn = {
query: string;
} & Pick<BuildQueryOptions, 'afterQueryReturn'>;
type BuildQueryDetailFunction = (
options?: BuildQueryDetailOptions,
) => BuildQueryDetailReturn;

@ -0,0 +1,10 @@
type QueryResultModifierFunction = (output: unknown) => unknown;
type BuildQueryOptions = {
afterQueryReturn?: QueryResultModifierFunction;
};
type BuildQueryFunction = (
request: import('express').Request,
options?: BuildQueryOptions,
) => string;

@ -0,0 +1,4 @@
type CallOptions = {
parameters?: unknown[];
notCallableReturn?: unknown;
};

@ -0,0 +1,11 @@
type CreateHostConnectionRequestBody = {
dbName?: string;
ipAddress: string;
isPing?: boolean;
// Host password; same as database password.
password: string;
port?: number;
sshPort?: number;
// Database user.
user?: string;
};

@ -0,0 +1,9 @@
type DBInsertOrUpdateFunctionCommonParams = ModuleSubroutineCommonParams & {
file: string;
line?: number;
};
type DBInsertOrUpdateFunctionCommonOptions = Omit<
ExecModuleSubroutineOptions,
'subParams' | 'subModuleName'
>;

@ -0,0 +1,11 @@
type DBJobParams = DBInsertOrUpdateFunctionCommonParams & {
job_command: string;
job_data?: string;
job_name: string;
job_title: string;
job_description: string;
job_host_uuid?: string;
job_progress?: number;
};
type DBInsertOrUpdateJobOptions = DBInsertOrUpdateFunctionCommonOptions;

@ -0,0 +1,18 @@
type DBVariableParams = DBInsertOrUpdateFunctionCommonParams & {
update_value_only?: 0 | 1;
variable_default?: string;
varaible_description?: string;
variable_name?: string;
variable_section?: string;
variable_source_table?: string;
variable_source_uuid?: string;
variable_uuid?: string;
variable_value?: number | string;
};
type DBInsertOrUpdateVariableOptions = DBInsertOrUpdateFunctionCommonOptions;
type DBInsertOrUpdateVariableFunction = (
subParams: DBVariableParams,
options?: DBInsertOrUpdateVariableOptions,
) => string;

@ -0,0 +1,3 @@
type DBJobAnvilSyncSharedOptions = {
jobHostUUID?: string;
};

Some files were not shown because too many files have changed in this diff.