More documentation, and a display of the status of each file's docs quality. It also includes detecting CAPS words like TODO, BUG, and showing a small icon next to each function.

main
Zed A. Shaw 2 years ago
parent 53eca88150
commit 80916c6a03
  1. 3
      admin/pages/DocsBrowser.svelte
  2. 70
      lib/email.js
  3. 29
      lib/logging.js
  4. 11
      lib/models.js
  5. 9
      lib/ormish.js
  6. 109
      lib/queues.js
  7. 2
      package.json

@ -105,7 +105,6 @@
flex-direction: column; flex-direction: column;
width: 100%; width: 100%;
margin-bottom: 1rem; margin-bottom: 1rem;
font-weight: 600;
} }
export > heading h4, export > heading h4,
@ -114,7 +113,7 @@
{ {
margin-top: 0.5rem; margin-top: 0.5rem;
font-family: var(--font-family); font-family: var(--font-family);
font-weight: 900; font-weight: 600;
} }
export > heading meta-data { export > heading meta-data {

@ -1,3 +1,19 @@
/*
Helps with some of the rough parts of `nodemailer`. Its main job is to
craft templates from `emails/`, load configurations from `secrets/email.json`,
and craft fancy HTML emails from those.
The file `emails/config.json` also contains a lot of additional template variables
that you can use to add more dynamic text to send emails. For example, it currently
has the company name, address, and unsubscribe links.
It also has some of the email testing found in the
`admin/pages/EmailDNS.svelte` and `admin/pages/EmailSend.svelte` testing
tools. If you want to learn how to confirm your email setup is working look
at `dns_check`.
*/
import nodemailer from "nodemailer"; import nodemailer from "nodemailer";
import fs from 'fs/promises'; import fs from 'fs/promises';
import _ from 'lodash'; import _ from 'lodash';
@ -9,6 +25,15 @@ const log = logging.create("lib/email.js");
// TODO: move this out to an easy to access/change location // TODO: move this out to an easy to access/change location
let configuration = {}; let configuration = {};
/*
Configures the `nodemailer` transporter based on the configuration in `secrets/email.json`.
___FOOTGUN___: When you run with `npm run DANGER_ADMIN` it sets the `process.env.DANGER_ADMIN`
and that triggers the `nodemailer` console logger. This also happens if you set `DEBUG=1` on
the command line so that `process.env.DEBUG` is set. Currently there's no way to prevent this,
so if you want to run in `DANGER_ADMIN/DEBUG` mode _and_ send emails to your mail server then
you'll have to hack `configure_transporter`.
*/
const configure_transporter = async () => { const configure_transporter = async () => {
if(process.env.DANGER_ADMIN || process.env.DEBUG) { if(process.env.DANGER_ADMIN || process.env.DEBUG) {
configuration = { configuration = {
@ -25,11 +50,30 @@ const configure_transporter = async () => {
return nodemailer.createTransport(configuration); return nodemailer.createTransport(configuration);
} }
// TODO: trash hacks to get the config out for the mail testing tool /*
___TODO___: Trash hacks to get the config out for the mail testing tool.
*/
export const get_config = () => configuration; export const get_config = () => configuration;
/*
The currently configured `nodemailer` transporter, if you want to raw
`nodemailer` work.
*/
export const transporter = await configure_transporter(); export const transporter = await configure_transporter();
/*
Load the `.html` and `.txt` templates from `emails/` based on the name. Files
are mapped as `emails/${name}.html` or `emails/${name}.txt`. These two templates
will make the basis of an email that supports both text and HTML display.
__FOOTGUN__: The HTML templates are incredibly generic and probably cause spam detectors
to go crazy. I got them from somewhere just to get started but they need a full
rewrite. If you're using them consider stripping the HTML templates to the ground and
hand crafting your own to avoid spam triggers.
+ `name string` -- The name of the template to load.
+ ___return___ {html:, txt:} -- The `.html` and `.txt` templates as keys.
*/
export const load_templates = async (name) => { export const load_templates = async (name) => {
const html = await fs.readFile(`emails/${name}.html`); const html = await fs.readFile(`emails/${name}.html`);
const txt = await fs.readFile(`emails/${name}.txt`); const txt = await fs.readFile(`emails/${name}.txt`);
@ -40,6 +84,14 @@ export const load_templates = async (name) => {
} }
} }
/*
When `process.env.DEBUG` is set (using `DEBUG=1` on the CLI) this will write all
emails to the `debug/emails` directory so you can load them in a browser or
text editor to see how they'll look without sending them to a client.
+ `templates Object` -- The templates you get from `load_templates`.
+ `name String` -- The name to write the files to so `name=x` would write `x.html` and `x.txt`.
*/
export const debug_templates = async (templates, name) => { export const debug_templates = async (templates, name) => {
if(process.env.DEBUG) { if(process.env.DEBUG) {
log.debug(`Writing debug email for ${name} to debug/emails/${name}.(html|txt)`); log.debug(`Writing debug email for ${name} to debug/emails/${name}.(html|txt)`);
@ -50,8 +102,12 @@ export const debug_templates = async (templates, name) => {
} }
} }
/* This never rejects the promise it makes but instead returns /*
* any errors it receives or undefined if none. Uses the antiquated `transporter.sendMail` callback to send the email,
but wraps it in a promise so you get logging on errors, and a result
returned like a modern `async` function.
+ `data Object` -- The nodemailer configuration for the email to send.
*/ */
export const send_email = async (data) => { export const send_email = async (data) => {
const result = new Promise((resolve, reject) => { const result = new Promise((resolve, reject) => {
@ -91,6 +147,14 @@ const add_reverse_error = (result, error) => {
result.reverse_errors.push(res_error); result.reverse_errors.push(res_error);
} }
/*
Performs a check of the DNS records for `hostname` to make sure
they have all the settings most email providers demand. It's simple
but catches a lot of missing information like reverse DNS records,
SPF records, and DMARC.
+ `hostname String` -- The host to query and analyze,
*/
export const dns_check = async (hostname) => { export const dns_check = async (hostname) => {
let result = { let result = {
ip4: {}, ip4: {},

@ -1,9 +1,23 @@
/*
A simple logging helper that uses the `pino` logging system. Pino is actually
not very good but it works well enough. My main complaint is it seems Pino's
speed comes from doing _zero_ string interpolation on its own. You have to
craft all of the strings yourself, which is probably the majority of the processing
time in logging, so Pino is effectively "cheating" by claiming it's fast but
not doing anything. If you ever want to beat Pino just don't log anything too.
Fastest log is no logs at all.
*/
import pino from 'pino'; import pino from 'pino';
import pinoPretty from 'pino-pretty'; import pinoPretty from 'pino-pretty';
import path from 'path'; import path from 'path';
const log_level = process.env.PROD === undefined ? "debug" : "info"; const log_level = process.env.PROD === undefined ? "debug" : "info";
/*
Base log connector using `pino` logging. You can use this but
it's probably better to use the `create` method below so you can
specify the file this log came from.
*/
export const log = pino({ export const log = pino({
level: log_level, level: log_level,
prettyPrint: { prettyPrint: {
@ -16,6 +30,21 @@ export const log = pino({
prettifier: pinoPretty prettifier: pinoPretty
}); });
/*
This will create a `pino` logger with the name of the file in the log messages. Incredibly
useful when you're trying to find out where something happened. To make this work you need
to do the following:
```javascript
const log = logging.create(import.meta.url);
```
The `import.meta.url` is how you get the current file in ESM style modules. In older
`node` you'd use the so-called "magic" variable `__filename`. I guess if you put `__` in a
variable name it's considered magic, but if you attach `meta.url` to a keyword like `import`
and then also make that keyword a function magically then it's definitely not magic and
superior to a simple variable everyone knows.
*/
export const create = (import_url) => { export const create = (import_url) => {
const pd = path.parse(import_url); const pd = path.parse(import_url);
const log_line = `${path.basename(pd.dir)}/${pd.base}`; const log_line = `${path.basename(pd.dir)}/${pd.base}`;

@ -102,9 +102,6 @@ export class User extends Model.from_table('user') {
ensure that the password and password_repeat are the same, lowercase the email, ensure that the password and password_repeat are the same, lowercase the email,
clean out unwanted attributes with `User.clean`, generate required keys, etc. clean out unwanted attributes with `User.clean`, generate required keys, etc.
- BUG: Test out bugs feature.
- TODO: Test out todo feature.
+ `attr Object` - attributes usually from a web form + `attr Object` - attributes usually from a web form
+ ___return___ `Object`, `undefined` or the new user on success + ___return___ `Object`, `undefined` or the new user on success
*/ */
@ -117,7 +114,6 @@ export class User extends Model.from_table('user') {
user = User.clean(attr); user = User.clean(attr);
} }
// TODO: force emails to lowercase here rather than in the database?
user.email = user.email.toLowerCase(); user.email = user.email.toLowerCase();
// validate here? // validate here?
@ -351,7 +347,7 @@ export class Site extends Model.from_table("site") {
tell the database to add to a counter it knows, then to select that number, tell the database to add to a counter it knows, then to select that number,
add to it, then set it again. add to it, then set it again.
_TODO_: figure out how to get sqlite3 to do a return of the value. ___TODO___: figure out how to get sqlite3 to do a return of the value.
+ `key string` - The key to set. + `key string` - The key to set.
+ `count number` - How much to increment. + `count number` - How much to increment.
@ -375,6 +371,11 @@ export class Site extends Model.from_table("site") {
+ [livestream](/admin/#/table/livestream) - admin this table + [livestream](/admin/#/table/livestream) - admin this table
*/ */
export class Livestream extends Model.from_table("livestream") { export class Livestream extends Model.from_table("livestream") {
/*
Return the `Media` attached to this (1:1) `Livestream`. This is set
after the stream is done and is stored for replay later.
*/
media() { media() {
return Media.first({id: this.media_id}); return Media.first({id: this.media_id});
} }

@ -25,7 +25,7 @@ import { attachPaginate } from 'knex-paginate';
A preconfigured knex.js driver using the config.development configuration A preconfigured knex.js driver using the config.development configuration
by default. by default.
_TODO_: Need to make this configurable, even though I just use one config right now since I run sqlite3 all the time. ___TODO___: Need to make this configurable, even though I just use one config right now since I run sqlite3 all the time.
*/ */
export const knex = knexConfig(config.development); export const knex = knexConfig(config.development);
@ -73,6 +73,9 @@ await load_schema();
is called by Model.validation and you can call it directly to get rules is called by Model.validation and you can call it directly to get rules
for a database table. for a database table.
___BUG___: validator doesn't do proper date formatting yet, so it won't support date types.
1. `name string` - the table name. 1. `name string` - the table name.
2. `rules Object` - default rules with empty "" for the rules you want filled in 2. `rules Object` - default rules with empty "" for the rules you want filled in
3. `all boolean` - set this to true if you want everything 3. `all boolean` - set this to true if you want everything
@ -113,7 +116,6 @@ export const validation = (name, rules, all=false, no_id=true) => {
rules[key] = required + "numeric"; rules[key] = required + "numeric";
break; break;
case "date": case "date":
// BUG: validator doesn't do proper date formatting yet
// rules[key] = required + "date"; // rules[key] = required + "date";
break; break;
case "email": case "email":
@ -435,6 +437,8 @@ export class Model {
/* /*
Implements an upsert (insert but update on conflict) for Postgres, MySQL, and SQLite3 only. Implements an upsert (insert but update on conflict) for Postgres, MySQL, and SQLite3 only.
___TODO___: allow specifying returns for databases that support it
+ attr `Object` - The attributes to insert or update. + attr `Object` - The attributes to insert or update.
+ conflict_key `string` - The key that can cause a conflict then update. + conflict_key `string` - The key that can cause a conflict then update.
+ merge `boolean` - Defaults to true and will change the record. false will ignore and not update on conflict. + merge `boolean` - Defaults to true and will change the record. false will ignore and not update on conflict.
@ -444,7 +448,6 @@ export class Model {
assert(conflict_key !== undefined, `You forgot to set the conflict_key on upsert to table ${this.table_name}`); assert(conflict_key !== undefined, `You forgot to set the conflict_key on upsert to table ${this.table_name}`);
let result = undefined; let result = undefined;
// TODO: allow specifying returns for databases that support it
if(merge) { if(merge) {
result = await knex(this.table_name).insert(attr).onConflict(conflict_key).merge(); result = await knex(this.table_name).insert(attr).onConflict(conflict_key).merge();
} else { } else {

@ -1,42 +1,145 @@
/*
Used to abstract away the Queue system used. It uses the Bull queue system
internally, which uses Redis as a queue. This works fine but if you wanted
to use something else you'd need to rewrite this file and maintain the data
formats sent.
The point of using a Queue is to take processing that might take a while and
push it to a separate process. For example, after a user has paid you may
need to run complicated PDF libraries to craft their receipt. You don't want
to do that _inside_ your `api/` handler as that will slow down the response.
The PDF/receipt also isn't something they need to know about immediately, so
just shove that into a queue and let another process (or multiple
processes) do it.
A good pattern is to create your `api/` handler so that it works, then identify
any code that could be placed in a queue job, then move that code into a
queue handler in `queues/`. The ideal situation is where your `api/` handler
does nothing but send a message to a queue. If the operation the user requested
doesn't need immediate feedback, then consider using a queue and sending them
an email when it's done.
*/
import Queue from 'bull'; import Queue from 'bull';
/*
Create a new Queue with the given name. Mostly used internally, but
if you need to use the queue system in your own code then this is
how you do it.
___BUG___: I'm not clear on the semantics of the bull queues. Should I make
one per process and use it in all functions? Or, make one for every
place I need it and don't share them?
+ `name string` -- Name for the queue.
+ ___return___ `Queue` object to use.
*/
export const create = (name) => { export const create = (name) => {
// I'm not clear on the semantics of the bull queues.
// Should I make one per process and use it in all functions?
// Or, make one for every place I need it and don't share them?
return new Queue(name); return new Queue(name);
} }
/*
Sends a `mail/registration` message to the queue that handles mail. See `queues/mail.js`
for how the sending actually works.
___WARNING___: This doesn't restrict what can go in the queue, and it's possible Bull isn't
that good at clearing the queue of records, so user information may be leaked into redis.
+ `user User` -- The `User` object should be ready to use by the queue in `queues/mail.js`.
*/
export const send_registration = (user) => { export const send_registration = (user) => {
const mail = create("mail/registration"); const mail = create("mail/registration");
mail.add({user}); mail.add({user});
} }
/*
Sends the welcome email to a user. Look in `queues/mail.js` to see how this is really done.
___WARNING___: This doesn't restrict what can go in the queue, and it's possible Bull isn't
that good at clearing the queue of records, so user information may be leaked into redis.
+ `user User` -- The `User` object should be ready to use by the queue in `queues/mail.js`.
*/
export const send_welcome = (user) => { export const send_welcome = (user) => {
const mail = create("mail/welcome"); const mail = create("mail/welcome");
mail.add({user}); mail.add({user});
} }
/*
Sends a receipt request to the queue process. Receipt processing can get insanely complicated
depending on what the user needs. I tend to use simple HTML email, but if you want to get
into pointless PDF receipts then definitely use a queue handler.
___WARNING___: This doesn't restrict what can go in the queue, and it's possible Bull isn't
that good at clearing the queue of records, so user information may be leaked into redis.
+ `user User` -- The `User` object should be ready to use by the queue in `queues/mail.js`.
+ `payment_id number` -- The `id` for the payment to attach to the receipt.
+ `product_id number` -- The `Product` they purchased.
*/
export const send_receipt = (user, payment_id, product_id) => { export const send_receipt = (user, payment_id, product_id) => {
const mail = create("mail/receipt"); const mail = create("mail/receipt");
mail.add({user, payment_id, product_id}); mail.add({user, payment_id, product_id});
} }
/*
Sends a password reset __REQUEST__ email. This doesn't do the reset, only give them
the code to their email for the reset. One thing this does is include the IP address
and browser User Agent so the receiver will know if they requested it or not. The security
mechanism to prevent repeated reset attempts is:
+ There's a max limit on requests in `api/password_reset.js:RESET_MAX`.
+ There's an enforced delay on requests of 1 per 10 hours in `api/password_reset.js:RESET_HOURS_MAX`.
+ The user's email is notified of the IP address and browser used on request _and_ reset complete.
+ The account is locked if `RESET_MAX` is reached and no more attempts are accepted.
With this an attacker would get one guess every 10 hours, the user would be notified of the attempt (giving them time to notify admins), and if the attacker makes > `RESET_MAX` attempts the account is locked.
___WARNING___: This doesn't restrict what can go in the queue, and it's possible Bull isn't
that good at clearing the queue of records, so user information may be leaked into redis.
+ `user User` -- The `User` object should be ready to use by the queue in `queues/mail.js`.
+ `ip_address string` -- The IP address of the computer requesting the reset.
+ `browser string` -- The user agent of the browser requesting.
*/
export const send_reset = (user, ip_address, browser) => { export const send_reset = (user, ip_address, browser) => {
const mail = create("mail/reset"); const mail = create("mail/reset");
mail.add({user, ip_address, browser}); mail.add({user, ip_address, browser});
} }
/*
Send the final notification that the reset was successful. See `send_reset` for more
security rules.
___WARNING___: This doesn't restrict what can go in the queue, and it's possible Bull isn't
that good at clearing the queue of records, so user information may be leaked into redis.
+ `user User` -- The `User` object should be ready to use by the queue in `queues/mail.js`.
+ `ip_address string` -- The IP address of the computer requesting the reset.
+ `browser string` -- The user agent of the browser requesting.
*/
export const send_reset_finished = (user, ip_address, browser) => { export const send_reset_finished = (user, ip_address, browser) => {
const mail = create("mail/reset_finished"); const mail = create("mail/reset_finished");
mail.add({user, ip_address, browser}); mail.add({user, ip_address, browser});
} }
/*
Used to quickly update the current view statistics on `Livestream` to all
users viewing it. Since this isn't crucial and not something a user expects
to be updated immediately it's easy to shove it into a queue. The queue handler
in `queues/live.js` will use `sockets/live.js` to send out an announce.
+ `api_key string` -- Used to make sure this is from the server. Users shouldn't see this.
*/
export const send_update_viewers = (api_key) => { export const send_update_viewers = (api_key) => {
const live = create("live/update_viewers"); const live = create("live/update_viewers");
live.add({api_key}); live.add({api_key});
} }
/*
Adds to a `Livestream` view count in the database for statistics.
+ `livestream_id number` -- The `id` of the `Livestream` to update.
*/
export const add_view_count = (livestream_id) => { export const add_view_count = (livestream_id) => {
const live = create("live/add_view_count"); const live = create("live/add_view_count");
live.add({livestream_id}); live.add({livestream_id});

@ -17,7 +17,7 @@
"api": "nodemon ./bando.js api", "api": "nodemon ./bando.js api",
"queue": "nodemon ./bando.js queue", "queue": "nodemon ./bando.js queue",
"test": "npx ava tests/**/*.js", "test": "npx ava tests/**/*.js",
"docs": "nodemon bando.js codedoc --output public/docs/api lib/*.js client/*.js", "docs": "nodemon bando.js codedoc --output public/docs/api lib/*.js client/*.js commands/*.js api/*.js api/**/*.js queues/*.js socket/*.js",
"modules": "./bando.js load", "modules": "./bando.js load",
"modules-watch": "nodemon --watch ../ljsthw-private/db/modules/ --ext .md,.js,.sh ./bando.js load", "modules-watch": "nodemon --watch ../ljsthw-private/db/modules/ --ext .md,.js,.sh ./bando.js load",
"rendered-watch": "nodemon --ignore \"rendered/build/**/*\" --watch ./rendered --watch static -e md,svelte,js,css ./bando.js rendered", "rendered-watch": "nodemon --ignore \"rendered/build/**/*\" --watch ./rendered --watch static -e md,svelte,js,css ./bando.js rendered",

Loading…
Cancel
Save