
store

This repository automatically updates the Saltcorn Marketplace after each update of the main branch.

Usage for plugin/pack author

  1. Fork the repository.
  2. Add a file for your plugin/pack to the extensions or packs directory (see the example after this list).
  3. Commit and push to your fork.
  4. Create a PR to the upstream repository.
  5. Once the PR is accepted, your plugin/pack will be pushed automatically and become available from the Saltcorn Marketplace.
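
For illustration, a plugin entry can be a small JSON file whose fields match what the update_ext trigger below consumes: check_plugin requires name, source (one of npm, git, github) and location, and the remaining fields are optional. The file name and all values here are hypothetical:

      {
        "name": "my-plugin",
        "source": "npm",
        "location": "@myscope/my-plugin",
        "description": "Short description shown in the marketplace",
        "documentation_link": "https://github.com/myscope/my-plugin"
      }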

Usage for maintainer

  1. Go to Project Settings/Secrets and variables/Actions and create a SALTCORN_TOKEN secret containing the Saltcorn Marketplace REST API token.
  2. Go to Project Settings/Actions/General and set "Workflow permissions" to "Read and write permissions".
  3. Add the update_ext and update_pack triggers to the Saltcorn Marketplace (e.g. by importing the pack embedded below or downloading it from the original repo).
  4. Run "Saltcorn Marketplace Pull" once to import the current content of the Saltcorn Marketplace into the repository, then merge the resulting branch into main.
{"tables":[],"views":[],"plugins":[],"pages":[],"roles":[],"library":[],"triggers":[{"name":"update_ext","description":"","action":"run_js_code","when_trigger":"API call","configuration":{"code":"var url, plugin, ext_table, ext_row, row_update;\r\n\r\nfunction assert(check, error) {\r\n  //console.log(\"assert(\", check, \", \", error, \")\");\r\n  if(!check) {\r\n    //console.error(\"assert: throw \", error);\r\n    throw error;\r\n  }\r\n}\r\n\r\nfunction data_wrap(data) {\r\n  return data;\r\n  var sys_require_assert, require_assert;\r\n  try { sys_require_assert = sys_require('node:assert'); } catch(e) {}\r\n  try { require_assert = require('node:assert'); } catch(e) {}\r\n  return {data, sys_require_assert: sys_require_assert !== undefined, require_assert: require_assert !== undefined};\r\n}\r\n\r\nfunction check_plugin(data) {\r\n  return data && data.name && data.source && data.source in {\"npm\":0,\"git\":0,\"github\":0} && data.location;\r\n}\r\nfunction plugin_data({name, source, location, description, documentation_link, keep_local, unsafe, has_auth, has_theme}) {\r\n  return {name, source, location, description, documentation_link, keep_local, unsafe, has_auth, has_theme};\r\n}\r\n\r\ntry {\r\n  body = row || body;\r\n} catch(e) {}\r\n\r\nassert(!!(body && (body.url || check_plugin(body.plugin) || check_plugin(body))), \"Payload must contains url or plugin data.\");\r\nif(body.url) {\r\n  assert(!body.pack, \"Can't include pack in both pack and url form.\");\r\n  url = body.url;\r\n  if(url.startsWith('/')) {\r\n    url = (body.protocol?body.protocol:'http')+'://'+([req.ips,[req.ip]].flat()[0])+(body.port?':'+String(body.port):'')+url;\r\n  }\r\n  plugin = plugin_data(await fetchJSON(url, {method: 'GET'}));\r\n} else if(check_plugin(body.plugin)) {\r\n  plugin = plugin_data(body.plugin);\r\n} else {\r\n  plugin = plugin_data(body);\r\n}\r\n//console.log(\"plugin =\", plugin);\r\next_table = Table.findOne({name: 'extensions'});\r\next_row = await ext_table.getRow({name: plugin.name});\r\n//console.log(\"ext_row =\", ext_row);\r\nif(ext_row && ext_row.id && (plugin.keep_local ?? 
false) && ext_row?.override) {\r\n  // noop;\r\n} else if(ext_row && ext_row.id) {\r\n  delete plugin.keep_local;\r\n  return data_wrap(await ext_table.tryUpdateRow(plugin, ext_row.id));\r\n} else {\r\n  plugin.downloads = 0;\r\n  delete plugin.keep_local;\r\n  return data_wrap(await ext_table.tryInsertRow(plugin));\r\n}\r\n","run_where":"Server"},"channel":null,"min_role":40},{"name":"update_pack","description":"","action":"run_js_code","when_trigger":"API call","configuration":{"code":"var url, packs_table, pack_row;\r\n\r\nfunction assert(check, error) {\r\n  //console.log(\"assert(\", check, \", \", error, \")\");\r\n  if(!check) {\r\n    //console.error(\"assert: throw \", error);\r\n    throw error;\r\n  }\r\n}\r\n\r\nfunction data_wrap(data) {\r\n  return data;\r\n  var sys_require_assert, require_assert;\r\n  try { sys_require_assert = sys_require('node:assert'); } catch(e) {}\r\n  try { require_assert = require('node:assert'); } catch(e) {}\r\n  return {data, sys_require_assert: sys_require_assert !== undefined, require_assert: require_assert !== undefined};\r\n}\r\n\r\ntry {\r\n  body = row || body;\r\n} catch(e) {}\r\n\r\nassert(!!(body && body.name && (body.url || body.pack)), \"Payload must contains name and pack in pack or url forms.\");\r\nif(body.url) {\r\n  assert(!body.pack, \"Can't include pack in both pack and url form.\");\r\n  url = body.url;\r\n  if(url.startsWith('/')) {\r\n    url = (body.protocol?body.protocol:'http')+'://'+([req.ips,[req.ip]].flat()[0])+(body.port?':'+String(body.port):'')+url;\r\n  }\r\n  body.pack = await fetchJSON(url, {method: 'GET'});\r\n}\r\npacks_table = Table.findOne({name: 'packs'});\r\npack_row = await packs_table.getRow({name: body.name});\r\nif(pack_row && pack_row.id && (body.keep_local ?? false) && pack_row?.override) {\r\n  // noop;\r\n} else if(pack_row && pack_row.id) {\r\n  return data_wrap(await packs_table.tryUpdateRow({pack:body.pack,description:body.description?body.description:pack_row.description}, pack_row.id));\r\n} else\r\n  return data_wrap(await packs_table.tryInsertRow({name:body.name,pack:body.pack,description:body.description}));\r\n","run_where":"Server"},"channel":null,"min_role":40}],"tags":[],"models":[],"model_instances":[],"event_logs":[]}

PS: One can fork this repository to maintain a secondary Saltcorn Marketplace, either on a separate tenant of public Saltcorn (like this repo and store) or on your own instance.

Caveats

  • Removed files will not be removed from the store (and will be recreated whenever "Saltcorn Marketplace Pull" is run).
  • The script in "Saltcorn Marketplace Pull" downloads whole tables in two requests and distributes the data to local files, but the script in "Saltcorn Marketplace Push" uploads every file in a separate request.
    1. One option is to optimize the transfer load: call update_ext/update_pack only for added/changed files (sketched after this list).
    2. Another option is to support deletion, at the cost of stability: before uploading all files, call a trigger that cleans the tables.
    3. A third option is to support deletion, at the cost of complexity: join all files locally and somehow pass the result to a trigger that cleans up and updates the tables in one transaction. Input size will be a problem there (see saltcorn#1686).
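
A minimal sketch of option 1, assuming the push runs right after a commit so that a diff against HEAD^ lists the changed files; the curl invocation mirrors the sketch in the maintainer section, and the SALTCORN_URL/SALTCORN_TOKEN names are illustrative:

    # Push only files added or modified by the last commit
    git diff --name-only --diff-filter=AM HEAD^ HEAD -- extensions packs |
    while read -r f; do
      case "$f" in
        extensions/*) trigger=update_ext ;;
        packs/*)      trigger=update_pack ;;
        *)            continue ;;
      esac
      curl -sf -X POST "$SALTCORN_URL/api/action/$trigger" \
        -H "Authorization: Bearer $SALTCORN_TOKEN" \
        -H "Content-Type: application/json" \
        --data @"$f"
    done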