Compare commits
No commits in common. "master" and "conversations" have entirely different histories.
master...conversations
@@ -7,13 +7,13 @@
/public/EQCSS.js
/schema/**

tmpl_list.go
tmpl_forum.go
tmpl_forums.go
tmpl_topic.go
tmpl_topic_alt.go
tmpl_topics.go
tmpl_profile.go
template_list.go
template_forum.go
template_forums.go
template_topic.go
template_topic_alt.go
template_topics.go
template_profile.go

gen_mysql.go
gen_mssql.go
9 .gitignore vendored
@@ -1,5 +1,4 @@
tmp/*
!tmp/filler.txt
tmp2/*
cert_test/*
tmp.txt
@@ -14,8 +13,6 @@ backups/*.sql
logs/*.log
config/config.json
node_modules/*
samples/vue/node_modules/*
samples/vue/*
bin/*
out/*
*.exe
@@ -24,11 +21,11 @@ out/*
*.log
.DS_Store
.vscode/launch.json
schema/lastSchema.json
config/config.go
QueryGen
RouterGen
Patcher
Gosora
Installer
tmpl_*.go
tmpl_*.jgo
Install
template_*.go
3 .htaccess Normal file
@@ -0,0 +1,3 @@
# Gosora doesn't use Apache, this file is just here to stop Apache from blindly serving our config files, etc. when this program isn't intended to be served in such a manner at all

deny from all
26 .travis.yml Normal file
@@ -0,0 +1,26 @@
env:
  global:
    - CC_TEST_REPORTER_ID=38ad11f95f28b1d33c9c31467f9eb5eb4e336e4e0e36046c86a4f7ff6e4c3977
language: go
go:
  - "1.10"
  - "1.11"
  - master
before_install:
  - cd $HOME
  - git clone https://github.com/Azareal/Gosora
  - cd Gosora
  - chmod -R 0777 .
  - mv ./config/config_example.json ./config/config.json
  - ./update-deps-linux
  - ./dev-update-travis
install: true
before_script:
  - curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./cc-test-reporter
  - chmod +x ./cc-test-reporter
  - ./cc-test-reporter before-build
script: ./run-linux-tests
after_script:
  - ./cc-test-reporter after-build --exit-code $TRAVIS_TEST_RESULT
addons:
  mariadb: '10.0'
9 .vscode/settings.json vendored
@@ -1,11 +1,4 @@
// Place your settings in this file to overwrite default and user settings.
{
    "editor.tabSize": 4,
    // Insert spaces when pressing Tab. This setting is overriden
    // based on the file contents when `editor.detectIndentation` is true.
    "editor.insertSpaces": true,
    // When opening a file, `editor.tabSize` and `editor.insertSpaces`
    // will be detected based on the file contents. Set to false to keep
    // the values you've explicitly set, above.
    "editor.detectIndentation": false
    "editor.insertSpaces": false
}
51 CONTRIBUTING.md Normal file
@@ -0,0 +1,51 @@
If you want to add a contribution, you'll have to open a pull request and sign the CLA (contributor license agreement).

It's mainly there to deal with any legal issues which come our way and to switch licenses without having to chase down contributors who have long stopped using the internet or are deceased or incapacitated.

Other uses may arise in the future, e.g. commercial licensing for companies which might not be authorised to use open source licensing and what-not, although that's currently uncertain as I'm not knowledgeable about the ins and outs of the law.

Try to prefix commits which introduce a lot of bugs or otherwise have a large impact on the usability of Gosora with UNSTABLE.

If anything seems suspect, then feel free to bring up an alternative, although I'd rather not get hung up on the little details if it's something which is purely a matter of opinion.

Also, please don't push new features, particularly ones which will require a great effort from other maintainers in the long term and have fairly minor benefits to the ecosystem as a whole, unless you are willing to maintain them.

# Coding Standards

All code must be unit tested wherever possible, with the exception of JavaScript, which is untestable with our current technologies; tread with caution there.

Use tabs, not spaces, for indentation.

# Golang

Use the standard linter and listen to what it tells you to do.

The route assignments in main.go are *legacy code*; add new routes to `router_gen/routes.go` instead.

Try to follow the single responsibility principle wherever possible, except where doing so would cause a large performance drop. In other words, don't give your interfaces / structs too many responsibilities; keep them simple.

Avoid hand-rolling queries. Use the builders, a ready-built statement or a datastore structure instead. Preferably a datastore.

Commits which require the patcher / update script to be run should be prefixed with "Database Changes: "

More coming up.

# JavaScript

Use semicolons at the end of statements. If you don't, you might wind up breaking a minifier or two.

Always use strict mode.

Don't worry about ES5, we're targeting modern browsers. If we decide to backport code to older browsers, then we'll transpile the files.

Please don't use await. It incurs too much cognitive overhead as to where and when you can use it.

To keep consistency with Go code, variables must be camelCase.

# JSON

To keep consistency with Go code, map keys must be camelCase.

# Phrases

Try to keep the name of a phrase close to the actual phrase in English to make it easier for localisers to reason about which phrase is which.
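As a concrete illustration of the "avoid hand-rolling queries" rule, the accumulator style used elsewhere in the codebase looks roughly like the sketch below. Only Select, Cols and Each are taken from the qgen.NewAcc usage visible in the patcher code further down this page; the surrounding function is hypothetical.

package main

import (
	"database/sql"
	"log"

	qgen "git.tuxpa.in/a/gosora/query_gen"
)

// A minimal sketch: iterate over topics through the query accumulator rather
// than writing the SQL by hand. Select/Cols/Each mirror the patcher usage on
// this page; everything else here is illustrative.
func listTopicTitles() error {
	return qgen.NewAcc().Select("topics").Cols("tid,title").Each(func(rows *sql.Rows) error {
		var tid int
		var title string
		if err := rows.Scan(&tid, &title); err != nil {
			return err
		}
		log.Printf("topic %d: %s", tid, title)
		return nil
	})
}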
21 Makefile
@@ -1,21 +0,0 @@



tidy:
	go mod tidy

build:
	mkdir -p bin
	go build -ldflags="-s -w" -o ./bin/gosora.exe .



clean:
	echo "Deleting artifacts from previous builds"
	rm -f template_*.go
	rm -f tmpl_*.go
	rm -f gen_*.go
	rm -f tmpl_client/template_*
	rm -f tmpl_client/tmpl_*
	rm -f ./bin/*
	rm -f ./common/gen_extend.go
189 README.md Normal file
@@ -0,0 +1,189 @@
# Gosora ![Build Status](https://travis-ci.org/Azareal/Gosora.svg?branch=master) [![Azareal's Discord Chat](https://img.shields.io/badge/style-Invite-7289DA.svg?style=flat&label=Discord)](https://discord.gg/eyYvtTf)

A super fast forum software written in Go. You can talk to us on our Discord chat!

The initial code-base was forked from one of my side projects, but has now gone far beyond that. We've moved along in development and the software should be somewhat stable for general use.

Features may break from time to time, however I will generally try to warn of the biggest offenders in advance so that you can tread with caution around certain commits; the upcoming v0.1 will undergo even more rigorous testing.

If there's something you want, file an issue or open a topic on the forum, and you may very well find it landing in the software fairly quickly.

For plugin and theme developers, things are a little dicier, as the internal APIs and ways of writing themes are in constant flux, however some stability in that area should be coming fairly soon.

If you like this software, please give it a star and give us some feedback :)

If you dislike it, please give us some feedback on how to make it better! We're always looking for feedback. We love hearing your opinions. If there's something missing or something doesn't look quite right, don't worry! We plan to add many, many things in the run up to v0.1!


# Features
Standard Forum Functionality. All of the little things you would expect of any forum software. E.g. Common Moderation features, modlogs, theme system, avatars, bbcode parser, markdown parser, report system, per-forum permissions, group permissions and so on.

Custom Pages. There are still some rough edges.

Emojis. Allow your users to express themselves without resorting to serving tons upon tons of image files.

In-memory static file, forum and group caches. We have a slightly more dynamic cache for users and topics.

A profile system, including profile comments and moderation tools for the profile owner.

A template engine which compiles templates down to machine code. Over forty times faster than the standard template library `html/templates`, although it does remove some of the hand holding to achieve this. Compatible with templates written for `html/templates`, so you don't need to learn any new templating language.

A plugin system. We have a number of APIs and hooks for plugins, however they're currently subject to change and don't cover as much of the software as we'd like yet.

A responsive design. Looks great on mobile phones, tablets, laptops, desktops and more!

Other modern features like alerts, likes, advanced dashboard with live stats (CPU, RAM, online user count, and so on), etc.


# Requirements

Go 1.10 or newer - You will need to install this. Pick the .msi if you want everything sorted out for you rather than having to go around updating the environment settings. https://golang.org/doc/install

For Ubuntu, you can consult: https://tecadmin.net/install-go-on-ubuntu/
You will also want to run `ln -s /usr/local/go/bin/go` (replace /usr/local with wherever you put Go), so that go becomes visible to other users.

If you followed the instructions above, you can update to the latest version of Go simply by deleting the `/go/` folder and replacing it with a `/go/` folder for the latest version of Go.

Git - You may need this for downloading updates via the updater. You might already have this installed on your server; if the `git` commands don't work, then install this. https://git-scm.com/downloads

MySQL Database - You will need to set up a MySQL database somewhere. A MariaDB database works equally well and is much faster than MySQL. You could use something like WNMP / XAMPP, which ship with a little PHP script called PhpMyAdmin for managing MySQL databases, or you could install MariaDB directly.

Download the .msi installer from [MariaDB](https://mariadb.com/downloads) and run that. You may want to set it up as a service to avoid having to start it every time the computer starts up.

Instructions on how to set MariaDB up on Linux: https://downloads.mariadb.org/mariadb/repositories/

We recommend changing the root password (that is, the password for the user 'root'). Remember that password, you will need it for the installation process. Of course, we would advise using a user other than root for maximum security, although that adds additional steps to the process of getting everything set up.

You might also want to run `mysql_secure_installation` to further harden (aka make it more secure) MySQL / MariaDB.

If you're using Ubuntu, you might want to look at: https://www.itzgeek.com/how-tos/linux/ubuntu-how-tos/install-mariadb-on-ubuntu-16-04.html

It's entirely possible that your host already has MySQL installed and ready to go, so you might be able to skip this step, particularly if it's a managed VPS or a shared host. Or they might have a quicker and easier method of setting up MySQL.


# How to download

For Linux, you can skip down to the Installation section as it covers this.

On Windows, you might want to try the [GosoraBootstrapper](https://github.com/Azareal/GosoraBootstrapper) if you can't find the command prompt or otherwise can't follow those instructions. It's just a matter of double-clicking on the bat file there and it'll download the rest of the files for you.

# Installation

Consult [installation](https://github.com/Azareal/Gosora/blob/master/docs/installation.md) for instructions on how to install Gosora.

# Updating

Consult [updating](https://github.com/Azareal/Gosora/blob/master/docs/updating.md) for instructions on how to update Gosora.


# Running the program

*Linux*

If you have set up a service, you can run:

`./pre-run-linux`

`service gosora start`

You can then check Gosora's current status (to see if it started up properly) with:

`service gosora status`

And you can stop it with:

`service gosora stop`

If you haven't set up a service, you can run `./run-linux`, although you will be responsible for finding a way to run it in the background, so that it doesn't close when the terminal does.

One method might be to use: https://serverfault.com/questions/34750/is-it-possible-to-detach-a-process-from-its-terminal-or-i-should-have-used-s

*Windows*

Run `run.bat`, e.g. by double-clicking on it.


# How do I install plugins?

For the default plugins like Markdown and Helloworld, you can find them in the Plugin Manager of your Control Panel. For ones which aren't included by default, you will need to drag them from your `/extend/` directory and into the `/` directory (the root directory of your Gosora installation, where the executable and most of the main Go files are).

You will then need to recompile Gosora in order to link the plugin code with Gosora's code. For plugins not written in Go (e.g. JavaScript), you do not need to move them from the `/extend/` directory; they will automatically show up in your Control Panel ready to be installed.

Experimental plugins, aka the ones in the `/experimental/` folder (e.g. plugin_sendmail), are similar but different. You will have to move native plugins (ones written in Go) to the root directory of your installation and will have to move experimental plugins written in other languages into the `/extend/` directory.

We're looking for ways to clean up the plugin system so that all of them (except the experimental ones) are housed in `/extend/`, however we've encountered some problems with Go's packaging system. We hope to fix this issue in the future.


# Images
![Shadow Theme](https://github.com/Azareal/Gosora/blob/master/images/shadow.png)

![Shadow Quick Topic](https://github.com/Azareal/Gosora/blob/master/images/quick-topics.png)

![Tempra Simple Theme](https://github.com/Azareal/Gosora/blob/master/images/tempra-simple.png)

![Tempra Simple Topic List](https://github.com/Azareal/Gosora/blob/master/images/topic-list.png)

![Tempra Simple Mobile](https://github.com/Azareal/Gosora/blob/master/images/tempra-simple-mobile-375px.png)

![Cosora Prototype WIP](https://github.com/Azareal/Gosora/blob/master/images/cosora-wip.png)

More images in the /images/ folder. Beware though, some of them are *really* outdated. Also, keep in mind that a new theme is in the works.

# Dependencies

These are the libraries and pieces of software which Gosora relies on to function, an "ingredients" list so to speak.

A few of these, like Rez, aren't currently in use, but are things we think we'll need in the very near future and want to have ready so that we can quickly slot them in.

* Go 1.10+

* MariaDB (or any other MySQL compatible database engine). We'll allow other database engines in the future.

* github.com/go-sql-driver/mysql For interfacing with MariaDB.

* golang.org/x/crypto/bcrypt For hashing passwords.
* golang.org/x/crypto/argon2 For hashing passwords.

* github.com/Azareal/gopsutil For pulling information on CPU and memory usage. I've temporarily forked this, as we were having stability issues with the latest build.

* github.com/StackExchange/wmi Dependency for gopsutil on Windows.

* golang.org/x/sys/windows Also a dependency for gopsutil on Windows. This isn't needed at the moment, as I've rolled things back to an older more stable build.

* github.com/gorilla/websocket Needed for Gosora's Optional WebSockets Module.

* github.com/robertkrimen/otto Needed for the upcoming JS plugin type.

* gopkg.in/sourcemap.v1 Dependency for Otto.

* github.com/lib/pq For interfacing with PostgreSQL. You will be able to pick this instead of MariaDB soon.

* github.com/denisenkom/go-mssqldb For interfacing with MSSQL. You will be able to pick this instead of MariaDB soon.

* github.com/bamiaux/rez An image resizer (e.g. for spitting out thumbnails).

* github.com/esimov/caire The other image resizer, slower but may be useful for covering cases Rez does not. A third, faster one we might point to at some point is probably Discord's Lilliput, however it requires a C compiler and we don't want to add that as a dependency at this time.

* github.com/fsnotify/fsnotify A library for watching events on the file system.

* github.com/pkg/errors Some helpers to make it easier for us to track down bugs.

* More items to come here, our dependencies are going through a lot of changes, and I'll be documenting those soon ;)

# Bundled Plugins

There are several plugins which are bundled with the software by default. These cover various common tasks which aren't common enough to clutter the core with or which have competing implementation methods (E.g. plugin_markdown vs plugin_bbcode for post mark-up).

* Hey There / Skeleton / Hey There (JS Version) - Example plugins for helping you learn how to develop plugins.

* BBCode - A plugin in early development for converting BBCode tags into HTML.

* Markdown - An extremely simple plugin for converting Markdown into HTML.

* Social Groups - An extremely unstable WIP plugin which lets users create their own little discussion areas which they can administrate / moderate on their own.

# Developers

There are a few things you'll need to know before running the more developer oriented features like the tests or the benchmarks.

The benchmarks are currently being rewritten, as they're extremely serial right now. That can lead to severe slow-downs when run on a home computer, because the benchmarks end up on the same core as everything else (browser, OS, etc.) and the tests don't take parallelism into account.
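On the parallelism point above, a benchmark that does spread its iterations across cores looks roughly like the sketch below. This is a generic example built on the standard testing package, not code from this repository; the store type and names are hypothetical.

package main

import (
	"sync"
	"testing"
)

// A hypothetical in-memory store standing in for a real datastore.
type topicStore struct {
	mu     sync.RWMutex
	topics map[int]string
}

func (s *topicStore) Get(id int) (string, bool) {
	s.mu.RLock()
	defer s.mu.RUnlock()
	t, ok := s.topics[id]
	return t, ok
}

// RunParallel fans the b.N iterations out over GOMAXPROCS goroutines instead
// of running them serially on a single core. Run with: go test -bench=. -cpu=1,4
func BenchmarkTopicStoreGet(b *testing.B) {
	s := &topicStore{topics: map[int]string{1: "Hello world"}}
	b.RunParallel(func(pb *testing.PB) {
		for pb.Next() {
			s.Get(1)
		}
	})
}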
38 TODO.md Normal file
@@ -0,0 +1,38 @@
# TO-DO

Oh my, you caught me right at the start of this project. There's nothing to see here yet, aside from the absolute basics. You might want to look again later!


The various little features which somehow got stuck in the net. Don't worry, I'll get to them!

More moderation features. E.g. Move, Approval Queue (posts made by users in certain usergroups will need to be approved by a moderator before they're publicly visible), etc.

Add a simple anti-spam measure. I have quite a few ideas in mind, but it'll take a while to implement the more advanced ones, so I'd like to put off some of those to a later date and focus on the basics. E.g. CAPTCHAs, hidden fields, etc.

Add more granular permissions management to the Forum Manager.

Add a *better* plugin system. E.g. Allow for plugins written in JavaScript and ones written in Go. Also, we need to add many, many, many more plugin hooks.

I will need to ponder over implementing an even faster router. We don't need one immediately, although it would be nice if we could get one in the near future. It really depends. Ideally, it would be one which can easily integrate with the current structure without much work, although I'm not beyond making some alterations to facilitate it, assuming that we don't get too tightly bound to that specific router.

Allow themes to define their own templates and to override core templates with their own.

Add a friend system.

Improve profile customisability.

Implement all the common BBCode tags in plugin_bbcode.

Implement all the common Markdown codes in plugin_markdown.

Add more administration features.

Add more features for improving user engagement. E.g. A like system. I have a few of these in mind, but I've been pre-occupied with implementing other features.

Add a widget system.

Add support for multi-factor authentication.

Add support for secondary emails for users.

Improve the shell scripts and possibly add support for Make? A make.go might be a good solution?
1 attachs/filler.txt Normal file
@@ -0,0 +1 @@
This file is here so that Git will include this folder in the repository.
1 backups/filler.txt Normal file
@@ -0,0 +1 @@
This file is here so that Git will include this folder in the repository.
33 build-linux
@@ -1,24 +1,31 @@
echo "Deleting artifacts from previous builds"
rm -f template_*.go
rm -f gen_*.go
rm -f tmpl_client/template_*.go
rm -f ./Gosora

echo "Building the router generator"
go build -ldflags="-s -w" -o RouterGen "./router_gen"
cd ./router_gen
go build -o RouterGen
mv ./RouterGen ..
cd ..
echo "Running the router generator"
./RouterGen

echo "Building the hook stub generator"
go build -ldflags="-s -w" -o HookStubGen "./cmd/hook_stub_gen"
echo "Running the hook stub generator"
./HookStubGen

echo "Building the hook generator"
go build -tags hookgen -ldflags="-s -w" -o HookGen "./cmd/hook_gen"
echo "Running the hook generator"
./HookGen

echo "Building the query generator"
go build -ldflags="-s -w" -o QueryGen "./cmd/query_gen"
cd ./query_gen
go build -o QueryGen
mv ./QueryGen ..
cd ..
echo "Running the query generator"
./QueryGen

echo "Building Gosora"
go generate
go build -ldflags="-s -w" -o ./bin/gosora.exe
go build -o Gosora

echo "Building the installer"
cd ./install
go build -o Install
mv ./Install ..
cd ..

@@ -1,38 +1,31 @@
echo "Deleting artifacts from previous builds"
rm -f template_*.go
rm -f tmpl_*.go
rm -f gen_*.go
rm -f tmpl_client/template_*
rm -f tmpl_client/tmpl_*
rm -f tmpl_client/template_*.go
rm -f ./Gosora
rm -f ./common/gen_extend.go

echo "Building the router generator"
go build -ldflags="-s -w" -o RouterGen "./router_gen"
cd ./router_gen
go build -o RouterGen
mv ./RouterGen ..
cd ..
echo "Running the router generator"
./RouterGen

echo "Building the hook stub generator"
go build -ldflags="-s -w" -o HookStubGen "./cmd/hook_stub_gen"
echo "Running the hook stub generator"
./HookStubGen

echo "Building the hook generator"
go build -tags hookgen -ldflags="-s -w" -o HookGen "./cmd/hook_gen"
echo "Running the hook generator"
./HookGen

echo "Generating the JSON handlers"
easyjson -pkg common

echo "Building the query generator"
go build -ldflags="-s -w" -o QueryGen "./cmd/query_gen"
cd ./query_gen
go build -o QueryGen
mv ./QueryGen ..
cd ..
echo "Running the query generator"
./QueryGen

echo "Building Gosora"
go generate
go build -ldflags="-s -w" -o Gosora -tags no_ws
go build -o Gosora -tags no_ws

echo "Building the installer"
go build -ldflags="-s -w" -o Installer "./install"
cd ./install
go build -o Install
mv ./Install ..
cd ..
43 build-nowebsockets.bat Normal file
@@ -0,0 +1,43 @@
@echo off
rem TODO: Make these deletes a little less noisy
del "template_*.go"
del "gen_*.go"
del "tmpl_client/template_*.go"
del "gosora.exe"

echo Generating the dynamic code
go generate
if %errorlevel% neq 0 (
	pause
	exit /b %errorlevel%
)

echo Building the executable
go build -o gosora.exe -tags no_ws
if %errorlevel% neq 0 (
	pause
	exit /b %errorlevel%
)

echo Building the installer
go build ./install
if %errorlevel% neq 0 (
	pause
	exit /b %errorlevel%
)

echo Building the router generator
go build ./router_gen
if %errorlevel% neq 0 (
	pause
	exit /b %errorlevel%
)

echo Building the query generator
go build ./query_gen
if %errorlevel% neq 0 (
	pause
	exit /b %errorlevel%
)
echo Gosora was successfully built
pause
45 build.bat Normal file
@@ -0,0 +1,45 @@
@echo off
rem TODO: Make these deletes a little less noisy
del "template_*.go"
del "gen_*.go"
cd tmpl_client
del "template_*.go"
cd ..
del "gosora.exe"

echo Generating the dynamic code
go generate
if %errorlevel% neq 0 (
	pause
	exit /b %errorlevel%
)

echo Building the executable
go build -o gosora.exe
if %errorlevel% neq 0 (
	pause
	exit /b %errorlevel%
)

echo Building the installer
go build ./install
if %errorlevel% neq 0 (
	pause
	exit /b %errorlevel%
)

echo Building the router generator
go build ./router_gen
if %errorlevel% neq 0 (
	pause
	exit /b %errorlevel%
)

echo Building the query generator
go build ./query_gen
if %errorlevel% neq 0 (
	pause
	exit /b %errorlevel%
)
echo Gosora was successfully built
pause
11 build_templates.bat Normal file
@@ -0,0 +1,11 @@
echo Building the templates
gosora.exe -build-templates

echo Rebuilding the executable
go build -o gosora.exe
if %errorlevel% neq 0 (
	pause
	exit /b %errorlevel%
)

pause
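The `go generate` step that the build scripts above invoke works by scanning the package's Go sources for //go:generate directives and running the named commands. The fragment below only illustrates the form of such a directive; the actual directive(s) in this repository may point at a different command.

package main

// Illustrative fragment only: `go generate` looks for comment lines of exactly
// this shape anywhere in the package's .go files and runs the command after
// the marker verbatim. The real directive(s) in this repository may differ.
//go:generate go run ./cmd/query_gen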
378 chi_router.go
@@ -1,378 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log"
|
||||
"net/http"
|
||||
|
||||
"git.tuxpa.in/a/gosora/common"
|
||||
"git.tuxpa.in/a/gosora/routes"
|
||||
"git.tuxpa.in/a/gosora/routes/panel"
|
||||
"git.tuxpa.in/a/gosora/uutils"
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/go-chi/chi/v5/middleware"
|
||||
)
|
||||
|
||||
type ContextKey string
|
||||
|
||||
var ContextKey_USER ContextKey = "gosora_user"
|
||||
var ContextKey_EXTRADATA ContextKey = "gosora_extradata"
|
||||
|
||||
func AuthContextMiddleware(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
user_copy, ok := common.PreRoute(w, r)
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
user := &user_copy
|
||||
ctx := context.WithValue(r.Context(), ContextKey_USER, user)
|
||||
|
||||
next.ServeHTTP(w, r.WithContext(ctx))
|
||||
})
|
||||
}
|
||||
func ParseFormMiddleware(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
u, ok := r.Context().Value(ContextKey_USER).(*common.User)
|
||||
if !ok {
|
||||
log.Println("error grabbing user from context")
|
||||
return
|
||||
}
|
||||
err := common.ParseForm(w, r, u)
|
||||
if err != nil {
|
||||
log.Println("error parsing form middleware")
|
||||
return
|
||||
}
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
|
||||
func NoBannedMiddleware(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
user, ok := r.Context().Value(ContextKey_USER).(*common.User)
|
||||
if !ok {
|
||||
log.Println("error grabbing user from context")
|
||||
return
|
||||
}
|
||||
err := common.NoBanned(w, r, user)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
return
|
||||
}
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
|
||||
func NoSessionMismatchMiddleware(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
user, ok := r.Context().Value(ContextKey_USER).(*common.User)
|
||||
if !ok {
|
||||
log.Println("error grabbing user from context")
|
||||
return
|
||||
}
|
||||
err := common.NoSessionMismatch(w, r, user)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
return
|
||||
}
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
func MemberOnlyMiddleware(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
user, ok := r.Context().Value(ContextKey_USER).(*common.User)
|
||||
if !ok {
|
||||
log.Println("error grabbing user from context")
|
||||
return
|
||||
}
|
||||
err := common.MemberOnly(w, r, user)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
return
|
||||
}
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
|
||||
func SuperModOnlyMiddleware(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
user, ok := r.Context().Value(ContextKey_USER).(*common.User)
|
||||
if !ok {
|
||||
log.Println("error grabbing user from context")
|
||||
return
|
||||
}
|
||||
err := common.SuperModOnly(w, r, user)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
return
|
||||
}
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
|
||||
func SuperAdminOnlyMiddleware(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
user, ok := r.Context().Value(ContextKey_USER).(*common.User)
|
||||
if !ok {
|
||||
log.Println("error grabbing user from context")
|
||||
return
|
||||
}
|
||||
err := common.SuperAdminOnly(w, r, user)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
return
|
||||
}
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
|
||||
type UserRoute = func(w http.ResponseWriter, r *http.Request, u *common.User) common.RouteError
|
||||
type ExtraRoute = func(w http.ResponseWriter, r *http.Request, u *common.User, s string) common.RouteError
|
||||
type HeaderRoute = func(w http.ResponseWriter, r *http.Request, u *common.User, h *common.Header) common.RouteError
|
||||
type HeaderExtraRoute = func(w http.ResponseWriter, r *http.Request, u *common.User, h *common.Header, s string) common.RouteError
|
||||
|
||||
func ConvertUserRoute(fn UserRoute) http.HandlerFunc {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
u, ok := r.Context().Value(ContextKey_USER).(*common.User)
|
||||
if !ok {
|
||||
log.Println("error grabbing user from context")
|
||||
return
|
||||
}
|
||||
err := fn(w, r, u)
|
||||
if err != nil {
|
||||
log.Println("error handling %s %s", fn, err)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func ConvertHeaderRoute(fn HeaderRoute) http.HandlerFunc {
|
||||
return ConvertUserRoute(func(w http.ResponseWriter, r *http.Request, u *common.User) common.RouteError {
|
||||
h, err := common.UserCheckNano(w, r, u, uutils.Nanotime())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return fn(w, r, u, h)
|
||||
})
|
||||
}
|
||||
func ConvertExtraRoute(fn ExtraRoute) http.HandlerFunc {
|
||||
return ConvertUserRoute(func(w http.ResponseWriter, r *http.Request, u *common.User) common.RouteError {
|
||||
s := chi.URLParam(r, "extra")
|
||||
return fn(w, r, u, s)
|
||||
})
|
||||
}
|
||||
|
||||
func ConvertHeaderExtraRoute(fn HeaderExtraRoute) http.HandlerFunc {
|
||||
return ConvertHeaderRoute(func(w http.ResponseWriter, r *http.Request, u *common.User, h *common.Header) common.RouteError {
|
||||
s := chi.URLParam(r, "extra")
|
||||
return fn(w, r, u, h, s)
|
||||
})
|
||||
}
|
||||
|
||||
func NewChiRouter() {
|
||||
r := chi.NewRouter()
|
||||
|
||||
// A good base middleware stack
|
||||
r.Use(middleware.RequestID)
|
||||
r.Use(middleware.RealIP)
|
||||
r.Use(middleware.Logger)
|
||||
r.Use(middleware.Recoverer)
|
||||
r.Use(AuthContextMiddleware)
|
||||
|
||||
r.Route("/overview", func(r chi.Router) {
|
||||
r.Get("/", ConvertHeaderRoute(routes.Overview))
|
||||
})
|
||||
r.Route("/pages", func(r chi.Router) {
|
||||
r.Get("/{extra}", ConvertHeaderExtraRoute(routes.CustomPage))
|
||||
})
|
||||
r.Route("/forums", func(r chi.Router) {
|
||||
r.Get("/", ConvertHeaderRoute(routes.ForumList))
|
||||
})
|
||||
r.Route("/forum", func(r chi.Router) {
|
||||
r.Get("/{extra}", ConvertHeaderExtraRoute(routes.ViewForum))
|
||||
})
|
||||
r.Route("/theme", func(r chi.Router) {
|
||||
r.Use(ParseFormMiddleware)
|
||||
r.Get("/", ConvertUserRoute(routes.ChangeTheme))
|
||||
})
|
||||
r.Route("/attachs", func(r chi.Router) {
|
||||
r.Use(ParseFormMiddleware)
|
||||
r.Get("/{extra}", ConvertExtraRoute(routes.ShowAttachment))
|
||||
})
|
||||
r.Route("/ws", func(r chi.Router) {
|
||||
r.Get("/{extra}", ConvertUserRoute(common.RouteWebsockets))
|
||||
})
|
||||
r.Route("/api", func(r chi.Router) {
|
||||
r.Get("/phrases*", ConvertUserRoute(routeAPIPhrases))
|
||||
r.Get("/me*", ConvertUserRoute(routes.APIMe))
|
||||
r.Get("/watches*", ConvertUserRoute(routeJSAntispam))
|
||||
r.Get("/*", ConvertUserRoute(routeAPI))
|
||||
})
|
||||
r.Route("/report", func(r chi.Router) {
|
||||
r.Use(NoBannedMiddleware)
|
||||
r.Use(NoSessionMismatchMiddleware)
|
||||
r.Use(MemberOnlyMiddleware)
|
||||
r.Get("/submit/{extra}", ConvertExtraRoute(routes.ReportSubmit))
|
||||
})
|
||||
r.Route("/topics", func(r chi.Router) {
|
||||
r.Get("/most-viewed", ConvertHeaderRoute(routes.TopicListMostViewed))
|
||||
r.Get("/week-views", ConvertHeaderRoute(routes.TopicListWeekViews))
|
||||
r.Get("/create", ConvertHeaderExtraRoute(routes.CreateTopic))
|
||||
r.Get("/", ConvertHeaderRoute(routes.TopicList))
|
||||
})
|
||||
r.Route("/panel", func(r chi.Router) {
|
||||
r.Use(SuperModOnlyMiddleware)
|
||||
r.Use(NoSessionMismatchMiddleware)
|
||||
r.Route("/forums", func(r chi.Router) {
|
||||
r.Get("/", ConvertUserRoute(panel.Forums))
|
||||
r.Get("/create", ConvertUserRoute(panel.ForumsCreateSubmit))
|
||||
r.Get("/delete/{extra}", ConvertExtraRoute(panel.ForumsDelete))
|
||||
r.Get("/delete/submit/{extra}", ConvertExtraRoute(panel.ForumsDeleteSubmit))
|
||||
r.Get("/order/edit/submit", ConvertUserRoute(panel.ForumsOrderSubmit))
|
||||
r.Get("/edit/{extra}", ConvertExtraRoute(panel.ForumsEdit))
|
||||
r.Get("/edit/submit/{extra}", ConvertExtraRoute(panel.ForumsEditSubmit))
|
||||
r.Get("/edit/perms/submit/{extra}", ConvertExtraRoute(panel.ForumsEditPermsSubmit))
|
||||
r.Get("/edit/perms/{extra}", ConvertExtraRoute(panel.ForumsEditPermsAdvance))
|
||||
r.Get("/edit/perms/adv/submit/{extra}", ConvertExtraRoute(panel.ForumsEditPermsAdvanceSubmit))
|
||||
r.Get("/action/create/submit/{extra}", ConvertExtraRoute(panel.ForumsEditActionCreateSubmit))
|
||||
r.Get("/action/delete/submit/{extra}", ConvertExtraRoute(panel.ForumsEditActionDeleteSubmit))
|
||||
})
|
||||
|
||||
r.Route("/settings", func(r chi.Router) {
|
||||
r.Get("/", ConvertUserRoute(panel.Settings))
|
||||
r.Get("/edit/{extra}", ConvertExtraRoute(panel.SettingEdit))
|
||||
r.Get("/edit/submit/{extra}", ConvertExtraRoute(panel.SettingEditSubmit))
|
||||
r.Get("/word-filters", ConvertUserRoute(panel.WordFilters))
|
||||
r.Get("/word-filters/create", ConvertUserRoute(panel.WordFiltersCreateSubmit))
|
||||
r.Get("/word-filters/edit/{extra}", ConvertExtraRoute(panel.WordFiltersEdit))
|
||||
r.Get("/word-filters/edit/submit/{extra}", ConvertExtraRoute(panel.WordFiltersEdit))
|
||||
r.Get("/word-filters/delete/submit/{extra}", ConvertExtraRoute(panel.WordFiltersDeleteSubmit))
|
||||
})
|
||||
|
||||
r.Route("/pages", func(r chi.Router) {
|
||||
r.Get("/create/submit", ConvertUserRoute(panel.PagesCreateSubmit))
|
||||
r.Get("/edit/{extra}", ConvertExtraRoute(panel.PagesEdit))
|
||||
r.Get("/edit/submit/{extra}", ConvertExtraRoute(panel.PagesEditSubmit))
|
||||
r.Get("/delete/submit/{extra}", ConvertExtraRoute(panel.PagesDeleteSubmit))
|
||||
|
||||
})
|
||||
|
||||
r.Route("/themes", func(r chi.Router) {
|
||||
r.Get("/", ConvertUserRoute(panel.Themes))
|
||||
r.Get("/default/{extra}", ConvertExtraRoute(panel.ThemesSetDefault))
|
||||
r.Get("/menus", ConvertUserRoute(panel.ThemesMenus))
|
||||
r.Get("/menus/edit/{extra}", ConvertExtraRoute(panel.ThemesMenusEdit))
|
||||
r.Get("/menus/item/edit/{extra}", ConvertExtraRoute(panel.ThemesMenuItemEdit))
|
||||
r.Get("/menus/item/edit/submit/{extra}", ConvertExtraRoute(panel.ThemesMenuItemEditSubmit))
|
||||
r.Get("/menus/item/create/submit", ConvertUserRoute(panel.ThemesMenuItemCreateSubmit))
|
||||
r.Get("/menus/item/delete/submit/{extra}", ConvertExtraRoute(panel.ThemesMenuItemDeleteSubmit))
|
||||
r.Get("/menus/item/order/edit/submit/{extra}", ConvertExtraRoute(panel.ThemesMenuItemOrderSubmit))
|
||||
r.Get("/widgets/edit/submit/{extra}", ConvertExtraRoute(panel.ThemesWidgetsEditSubmit))
|
||||
r.Get("/widgets/create/submit", ConvertUserRoute(panel.ThemesWidgetsCreateSubmit))
|
||||
r.Get("/widgets/delete/submit/{extra}", ConvertExtraRoute(panel.ThemesWidgetsDeleteSubmit))
|
||||
})
|
||||
|
||||
r.Route("/plugins", func(r chi.Router) {
|
||||
r.Get("/{extra}", ConvertExtraRoute(panel.PluginsInstall))
|
||||
r.Get("/activate/{extra}", ConvertExtraRoute(panel.PluginsActivate))
|
||||
r.Get("/deactivate/{extra}", ConvertExtraRoute(panel.PluginsDeactivate))
|
||||
r.Get("/install/{extra}", ConvertExtraRoute(panel.PluginsInstall))
|
||||
})
|
||||
})
|
||||
r.Route("/user", func(r chi.Router) {
|
||||
r.Use(MemberOnlyMiddleware)
|
||||
r.Route("/edit", func(r chi.Router) {
|
||||
r.Get("/", ConvertHeaderRoute(routes.AccountEdit))
|
||||
r.Get("/password", ConvertHeaderRoute(routes.AccountEditPassword))
|
||||
r.Get("/password/submit", ConvertUserRoute(routes.AccountEditPasswordSubmit))
|
||||
r.Get("/avatar/submit", ConvertUserRoute(routes.AccountEditAvatarSubmit))
|
||||
r.Get("/avatar/revoke/submit", ConvertUserRoute(routes.AccountEditRevokeAvatarSubmit))
|
||||
r.Get("/username/submit", ConvertUserRoute(routes.AccountEditUsernameSubmit))
|
||||
r.Get("/privacy", ConvertHeaderRoute(routes.AccountEditPrivacy))
|
||||
r.Get("/privacy/submit", ConvertUserRoute(routes.AccountEditPrivacySubmit))
|
||||
r.Get("/mfa", ConvertHeaderRoute(routes.AccountEditMFA))
|
||||
r.Get("/mfa/setup", ConvertHeaderRoute(routes.AccountEditMFASetup))
|
||||
r.Get("/mfa/setup/submit", ConvertUserRoute(routes.AccountEditMFASetupSubmit))
|
||||
r.Get("/mfa/disable/submit", ConvertUserRoute(routes.AccountEditMFADisableSubmit))
|
||||
r.Get("/email", ConvertHeaderRoute(routes.AccountEditEmail))
|
||||
r.Get("/token", ConvertExtraRoute(routes.AccountEditEmailTokenSubmit))
|
||||
r.Get("/logins", ConvertHeaderRoute(routes.AccountLogins))
|
||||
r.Get("/blocked", ConvertHeaderRoute(routes.AccountBlocked))
|
||||
})
|
||||
r.Get("/levels", ConvertHeaderRoute(routes.LevelList))
|
||||
r.Get("/convos", ConvertHeaderRoute(routes.Convos))
|
||||
r.Get("/convos/create", ConvertHeaderRoute(routes.ConvosCreate))
|
||||
r.Get("/convo", ConvertHeaderExtraRoute(routes.Convo))
|
||||
r.Get("/convos/create/submit", ConvertUserRoute(routes.ConvosCreateSubmit))
|
||||
r.Get("/convo/create/submit", ConvertUserRoute(routes.ConvosCreateSubmit))
|
||||
r.Get("/convo/delete/submit", ConvertExtraRoute(routes.ConvosDeleteReplySubmit))
|
||||
r.Get("/convo/edit/submit", ConvertExtraRoute(routes.ConvosEditReplySubmit))
|
||||
r.Get("/block/create", ConvertHeaderExtraRoute(routes.RelationsBlockCreate))
|
||||
r.Get("/block/create/submit", ConvertExtraRoute(routes.RelationsBlockCreateSubmit))
|
||||
r.Get("/block/remove", ConvertHeaderExtraRoute(routes.RelationsBlockRemove))
|
||||
r.Get("/block/remove/submit", ConvertExtraRoute(routes.RelationsBlockRemoveSubmit))
|
||||
r.Get("/{user}", ConvertHeaderRoute(routes.ViewProfile))
|
||||
})
|
||||
r.Route("/users", func(r chi.Router) {
|
||||
r.Use(NoSessionMismatchMiddleware)
|
||||
r.Get("/ban/submit", ConvertExtraRoute(routes.BanUserSubmit))
|
||||
r.Get("/unban", ConvertExtraRoute(routes.UnbanUser))
|
||||
r.Get("/activate", ConvertExtraRoute(routes.ActivateUser))
|
||||
r.Get("/ips", ConvertHeaderRoute(routes.IPSearch))
|
||||
r.Get("/delete-posts/submit", ConvertExtraRoute(routes.DeletePostsSubmit))
|
||||
})
|
||||
r.Route("/topic", func(r chi.Router) {
|
||||
r.Use(MemberOnlyMiddleware)
|
||||
r.Use(NoSessionMismatchMiddleware)
|
||||
r.Get("/create/submit", ConvertUserRoute(routes.CreateTopicSubmit))
|
||||
r.Get("/edit/submit", ConvertExtraRoute(routes.EditTopicSubmit))
|
||||
r.Get("/delete/submit", ConvertUserRoute(routes.DeleteTopicSubmit))
|
||||
r.Get("/stick/submit", ConvertExtraRoute(routes.StickTopicSubmit))
|
||||
r.Get("/unstick/submit", ConvertExtraRoute(routes.UnstickTopicSubmit))
|
||||
r.Get("/lock/submit", ConvertUserRoute(routes.LockTopicSubmit))
|
||||
r.Get("/unlock/submit", ConvertExtraRoute(routes.UnlockTopicSubmit))
|
||||
r.Get("/move/submit", ConvertExtraRoute(routes.MoveTopicSubmit))
|
||||
r.Get("/like/submit", ConvertExtraRoute(routes.LikeTopicSubmit))
|
||||
r.Get("/unlike/submit", ConvertExtraRoute(routes.UnlikeTopicSubmit))
|
||||
r.Get("/attach/add/submit", ConvertExtraRoute(routes.AddAttachToTopicSubmit))
|
||||
r.Get("/attach/remove/submit", ConvertExtraRoute(routes.RemoveAttachFromTopicSubmit))
|
||||
})
|
||||
r.Route("/reply", func(r chi.Router) {
|
||||
r.Use(MemberOnlyMiddleware)
|
||||
r.Get("/create", ConvertUserRoute(routes.ProfileReplyCreateSubmit))
|
||||
r.Get("/edit/submit", ConvertExtraRoute(routes.ReplyEditSubmit))
|
||||
r.Get("/delete/submit", ConvertExtraRoute(routes.ReplyDeleteSubmit))
|
||||
r.Get("/like/submit", ConvertExtraRoute(routes.ReplyLikeSubmit))
|
||||
r.Get("/unlike/submit", ConvertExtraRoute(routes.ReplyUnlikeSubmit))
|
||||
r.Get("/attach/add/submit", ConvertExtraRoute(routes.AddAttachToReplySubmit))
|
||||
r.Get("/attach/remove/submit", ConvertExtraRoute(routes.RemoveAttachFromReplySubmit))
|
||||
})
|
||||
r.Route("/profile", func(r chi.Router) {
|
||||
r.Use(MemberOnlyMiddleware)
|
||||
r.Use(NoSessionMismatchMiddleware)
|
||||
r.Get("/reply/create", ConvertUserRoute(routes.ProfileReplyCreateSubmit))
|
||||
r.Get("/reply/edit/submit", ConvertExtraRoute(routes.ProfileReplyEditSubmit))
|
||||
r.Get("/reply/edit/delete/submit", ConvertExtraRoute(routes.ProfileReplyDeleteSubmit))
|
||||
})
|
||||
r.Route("/poll", func(r chi.Router) {
|
||||
r.Use(MemberOnlyMiddleware)
|
||||
r.Use(NoSessionMismatchMiddleware)
|
||||
r.Get("/poll/vote", ConvertExtraRoute(routes.PollVote))
|
||||
r.Get("/poll/results", ConvertExtraRoute(routes.PollResults))
|
||||
})
|
||||
r.Route("/accounts", func(r chi.Router) {
|
||||
r.Get("/login", ConvertHeaderRoute(routes.AccountLogin))
|
||||
r.Get("/create", ConvertHeaderRoute(routes.AccountRegister))
|
||||
r.Get("/logout", ConvertUserRoute(routes.AccountLogout))
|
||||
r.Get("/login/submit", ConvertUserRoute(routes.AccountLoginSubmit))
|
||||
r.Get("/mfa_verify", ConvertHeaderRoute(routes.AccountLoginMFAVerify))
|
||||
r.Get("/mfa_verify/submit", ConvertUserRoute(routes.AccountLoginMFAVerifySubmit))
|
||||
r.Get("/create/submit", ConvertUserRoute(routes.AccountRegisterSubmit))
|
||||
r.Get("/password-reset", ConvertHeaderRoute(routes.AccountPasswordReset))
|
||||
r.Get("/password-reset/submit", ConvertUserRoute(routes.AccountPasswordResetSubmit))
|
||||
r.Get("/password-reset/token", ConvertHeaderRoute(routes.AccountPasswordResetToken))
|
||||
r.Get("/password-reset/token/submit", ConvertUserRoute(routes.AccountPasswordResetTokenSubmit))
|
||||
})
|
||||
r.Route("/static", func(r chi.Router) {})
|
||||
r.Route("/uploads", func(r chi.Router) {})
|
||||
|
||||
http.ListenAndServe(":3333", r)
|
||||
}
|
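A hypothetical example of how the pieces above fit together: the middleware stack stores the *common.User in the request context, and the Convert* helpers pull it back out for a Gosora-style handler. Only the names already defined in the file above are real; myRoute is made up.

// myRoute is a hypothetical handler in the UserRoute shape defined above.
func myRoute(w http.ResponseWriter, r *http.Request, u *common.User) common.RouteError {
	_, _ = w.Write([]byte("hello"))
	return nil
}

// and inside NewChiRouter:
//   r.Get("/hello", ConvertUserRoute(myRoute))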
@@ -1,115 +0,0 @@
|
||||
package hookgen
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"log"
|
||||
"os"
|
||||
"text/template"
|
||||
)
|
||||
|
||||
type HookVars struct {
|
||||
Imports []string
|
||||
Hooks []Hook
|
||||
}
|
||||
|
||||
type Hook struct {
|
||||
Name string
|
||||
Params string
|
||||
Params2 string
|
||||
Ret string
|
||||
Type string
|
||||
Any bool
|
||||
MultiHook bool
|
||||
Skip bool
|
||||
DefaultRet string
|
||||
Pure string
|
||||
}
|
||||
|
||||
func AddHooks(add func(name, params, ret, htype string, multiHook, skip bool, defaultRet, pure string)) {
|
||||
vhookskip := func(name, params string) {
|
||||
add(name, params, "(bool,RouteError)", "VhookSkippable_", false, true, "false,nil", "")
|
||||
}
|
||||
vhookskip("simple_forum_check_pre_perms", "w http.ResponseWriter,r *http.Request,u *User,fid *int,h *HeaderLite")
|
||||
vhookskip("forum_check_pre_perms", "w http.ResponseWriter,r *http.Request,u *User,fid *int,h *Header")
|
||||
vhookskip("router_after_filters", "w http.ResponseWriter,r *http.Request,prefix string")
|
||||
vhookskip("router_pre_route", "w http.ResponseWriter,r *http.Request,u *User,prefix string")
|
||||
vhookskip("route_forum_list_start", "w http.ResponseWriter,r *http.Request,u *User,h *Header")
|
||||
vhookskip("route_topic_list_start", "w http.ResponseWriter,r *http.Request,u *User,h *Header")
|
||||
vhookskip("route_attach_start", "w http.ResponseWriter,r *http.Request,u *User,fname string")
|
||||
vhookskip("route_attach_post_get", "w http.ResponseWriter,r *http.Request,u *User,a *Attachment")
|
||||
|
||||
vhooknoret := func(name, params string) {
|
||||
add(name, params, "", "Vhooks", false, false, "false,nil", "")
|
||||
}
|
||||
vhooknoret("router_end", "w http.ResponseWriter,r *http.Request,u *User,prefix string,extraData string")
|
||||
vhooknoret("topic_reply_row_assign", "r *ReplyUser")
|
||||
vhooknoret("counters_perf_tick_row", "low int64,high int64,avg int64")
|
||||
//forums_frow_assign
|
||||
//Hook(name string, data interface{}) interface{}
|
||||
/*hook := func(name, params, ret, pure string) {
|
||||
add(name,params,ret,"Hooks",true,false,ret,pure)
|
||||
}*/
|
||||
|
||||
hooknoret := func(name, params string) {
|
||||
add(name, params, "", "HooksNoRet", true, false, "", "")
|
||||
}
|
||||
hooknoret("forums_frow_assign", "f *Forum")
|
||||
|
||||
hookskip := func(name, params string) {
|
||||
add(name, params, "(skip bool)", "HooksSkip", true, true, "", "")
|
||||
}
|
||||
//hookskip("forums_frow_assign","f *Forum")
|
||||
hookskip("topic_create_frow_assign", "f *Forum")
|
||||
|
||||
hookss := func(name string) {
|
||||
add(name, "d string", "string", "Sshooks", true, false, "", "d")
|
||||
}
|
||||
hookss("topic_ogdesc_assign")
|
||||
}
|
||||
|
||||
func Write(hookVars HookVars) {
|
||||
fileData := `// Code generated by Gosora's Hook Generator. DO NOT EDIT.
|
||||
/* This file was automatically generated by the software. Please don't edit it as your changes may be overwritten at any moment. */
|
||||
package common
|
||||
import ({{range .Imports}}
|
||||
"{{.}}"{{end}}
|
||||
)
|
||||
{{range .Hooks}}
|
||||
func H_{{.Name}}_hook(t *HookTable,{{.Params}}) {{.Ret}} { {{if .Any}}
|
||||
{{if .MultiHook}}for _, hook := range t.{{.Type}}["{{.Name}}"] {
|
||||
{{if .Skip}}if skip = hook({{.Params2}}); skip {
|
||||
break
|
||||
}{{else}}{{if .Pure}}{{.Pure}} = {{else if .Ret}}return {{end}}hook({{.Params2}}){{end}}
|
||||
}{{else}}hook := t.{{.Type}}["{{.Name}}"]
|
||||
if hook != nil {
|
||||
{{if .Ret}}return {{end}}hook({{.Params2}})
|
||||
} {{end}}{{end}}{{if .Pure}}
|
||||
return {{.Pure}}{{else if .Ret}}
|
||||
return {{.DefaultRet}}{{end}}
|
||||
}{{end}}
|
||||
`
|
||||
tmpl := template.Must(template.New("hooks").Parse(fileData))
|
||||
var b bytes.Buffer
|
||||
if e := tmpl.Execute(&b, hookVars); e != nil {
|
||||
log.Fatal(e)
|
||||
}
|
||||
|
||||
err := writeFile("./common/gen_extend.go", b.String())
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func writeFile(name, body string) error {
|
||||
f, e := os.Create(name)
|
||||
if e != nil {
|
||||
return e
|
||||
}
|
||||
if _, e = f.WriteString(body); e != nil {
|
||||
return e
|
||||
}
|
||||
if e = f.Sync(); e != nil {
|
||||
return e
|
||||
}
|
||||
return f.Close()
|
||||
}
|
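For reference, feeding one of the vhooknoret entries above (router_end: Vhooks, no return value, single hook) through the template produces a wrapper of roughly this shape in common/gen_extend.go; the exact output varies with which hooks the build finds attached.

// Approximate shape of the generated wrapper for the "router_end" hook.
func H_router_end_hook(t *HookTable, w http.ResponseWriter, r *http.Request, u *User, prefix string, extraData string) {
	hook := t.Vhooks["router_end"]
	if hook != nil {
		hook(w, r, u, prefix, extraData)
	}
}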
@@ -1,266 +0,0 @@
|
||||
// Work in progress
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"log"
|
||||
"os"
|
||||
"strconv"
|
||||
|
||||
c "git.tuxpa.in/a/gosora/common"
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"gopkg.in/olivere/elastic.v6"
|
||||
)
|
||||
|
||||
func main() {
|
||||
log.Print("Loading the configuration data")
|
||||
err := c.LoadConfig()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
log.Print("Processing configuration data")
|
||||
err = c.ProcessConfig()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
if c.DbConfig.Adapter != "mysql" && c.DbConfig.Adapter != "" {
|
||||
log.Fatal("Only MySQL is supported for upgrades right now, please wait for a newer build of the patcher")
|
||||
}
|
||||
|
||||
err = prepMySQL()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
client, err := elastic.NewClient(elastic.SetErrorLog(log.New(os.Stdout, "ES ", log.LstdFlags)))
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
_, _, err = client.Ping("http://127.0.0.1:9200").Do(context.Background())
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
err = setupIndices(client)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
err = setupData(client)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func prepMySQL() error {
|
||||
return qgen.Builder.Init("mysql", map[string]string{
|
||||
"host": c.DbConfig.Host,
|
||||
"port": c.DbConfig.Port,
|
||||
"name": c.DbConfig.Dbname,
|
||||
"username": c.DbConfig.Username,
|
||||
"password": c.DbConfig.Password,
|
||||
"collation": "utf8mb4_general_ci",
|
||||
})
|
||||
}
|
||||
|
||||
type ESIndexBase struct {
|
||||
Mappings ESIndexMappings `json:"mappings"`
|
||||
}
|
||||
|
||||
type ESIndexMappings struct {
|
||||
Doc ESIndexDoc `json:"_doc"`
|
||||
}
|
||||
|
||||
type ESIndexDoc struct {
|
||||
Properties map[string]map[string]string `json:"properties"`
|
||||
}
|
||||
|
||||
type ESDocMap map[string]map[string]string
|
||||
|
||||
func (d ESDocMap) Add(column string, cType string) {
|
||||
d["column"] = map[string]string{"type": cType}
|
||||
}
|
||||
|
||||
func setupIndices(client *elastic.Client) error {
|
||||
exists, err := client.IndexExists("topics").Do(context.Background())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if exists {
|
||||
deleteIndex, err := client.DeleteIndex("topics").Do(context.Background())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !deleteIndex.Acknowledged {
|
||||
return errors.New("delete not acknowledged")
|
||||
}
|
||||
}
|
||||
|
||||
docMap := make(ESDocMap)
|
||||
docMap.Add("tid", "integer")
|
||||
docMap.Add("title", "text")
|
||||
docMap.Add("content", "text")
|
||||
docMap.Add("createdBy", "integer")
|
||||
docMap.Add("ip", "ip")
|
||||
docMap.Add("suggest", "completion")
|
||||
indexBase := ESIndexBase{ESIndexMappings{ESIndexDoc{docMap}}}
|
||||
oBytes, err := json.Marshal(indexBase)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
createIndex, err := client.CreateIndex("topics").Body(string(oBytes)).Do(context.Background())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !createIndex.Acknowledged {
|
||||
return errors.New("not acknowledged")
|
||||
}
|
||||
|
||||
exists, err = client.IndexExists("replies").Do(context.Background())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if exists {
|
||||
deleteIndex, err := client.DeleteIndex("replies").Do(context.Background())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !deleteIndex.Acknowledged {
|
||||
return errors.New("delete not acknowledged")
|
||||
}
|
||||
}
|
||||
|
||||
docMap = make(ESDocMap)
|
||||
docMap.Add("rid", "integer")
|
||||
docMap.Add("tid", "integer")
|
||||
docMap.Add("content", "text")
|
||||
docMap.Add("createdBy", "integer")
|
||||
docMap.Add("ip", "ip")
|
||||
docMap.Add("suggest", "completion")
|
||||
indexBase = ESIndexBase{ESIndexMappings{ESIndexDoc{docMap}}}
|
||||
oBytes, err = json.Marshal(indexBase)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
createIndex, err = client.CreateIndex("replies").Body(string(oBytes)).Do(context.Background())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !createIndex.Acknowledged {
|
||||
return errors.New("not acknowledged")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type ESTopic struct {
|
||||
ID int `json:"tid"`
|
||||
Title string `json:"title"`
|
||||
Content string `json:"content"`
|
||||
CreatedBy int `json:"createdBy"`
|
||||
IP string `json:"ip"`
|
||||
}
|
||||
|
||||
type ESReply struct {
|
||||
ID int `json:"rid"`
|
||||
TID int `json:"tid"`
|
||||
Content string `json:"content"`
|
||||
CreatedBy int `json:"createdBy"`
|
||||
IP string `json:"ip"`
|
||||
}
|
||||
|
||||
func setupData(client *elastic.Client) error {
|
||||
tcount := 4
|
||||
errs := make(chan error)
|
||||
|
||||
go func() {
|
||||
tin := make([]chan ESTopic, tcount)
|
||||
tf := func(tin chan ESTopic) {
|
||||
for {
|
||||
topic, more := <-tin
|
||||
if !more {
|
||||
break
|
||||
}
|
||||
_, err := client.Index().Index("topics").Type("_doc").Id(strconv.Itoa(topic.ID)).BodyJson(topic).Do(context.Background())
|
||||
if err != nil {
|
||||
errs <- err
|
||||
}
|
||||
}
|
||||
}
|
||||
for i := 0; i < 4; i++ {
|
||||
go tf(tin[i])
|
||||
}
|
||||
|
||||
oi := 0
|
||||
err := qgen.NewAcc().Select("topics").Cols("tid,title,content,createdBy,ip").Each(func(rows *sql.Rows) error {
|
||||
t := ESTopic{}
|
||||
err := rows.Scan(&t.ID, &t.Title, &t.Content, &t.CreatedBy, &t.IP)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
tin[oi] <- t
|
||||
if oi < 3 {
|
||||
oi++
|
||||
}
|
||||
return nil
|
||||
})
|
||||
for i := 0; i < 4; i++ {
|
||||
close(tin[i])
|
||||
}
|
||||
errs <- err
|
||||
}()
|
||||
|
||||
go func() {
|
||||
rin := make([]chan ESReply, tcount)
|
||||
rf := func(rin chan ESReply) {
|
||||
for {
|
||||
reply, more := <-rin
|
||||
if !more {
|
||||
break
|
||||
}
|
||||
_, err := client.Index().Index("replies").Type("_doc").Id(strconv.Itoa(reply.ID)).BodyJson(reply).Do(context.Background())
|
||||
if err != nil {
|
||||
errs <- err
|
||||
}
|
||||
}
|
||||
}
|
||||
for i := 0; i < 4; i++ {
|
||||
rf(rin[i])
|
||||
}
|
||||
oi := 0
|
||||
err := qgen.NewAcc().Select("replies").Cols("rid,tid,content,createdBy,ip").Each(func(rows *sql.Rows) error {
|
||||
r := ESReply{}
|
||||
err := rows.Scan(&r.ID, &r.TID, &r.Content, &r.CreatedBy, &r.IP)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rin[oi] <- r
|
||||
if oi < 3 {
|
||||
oi++
|
||||
}
|
||||
return nil
|
||||
})
|
||||
for i := 0; i < 4; i++ {
|
||||
close(rin[i])
|
||||
}
|
||||
errs <- err
|
||||
}()
|
||||
|
||||
fin := 0
|
||||
for {
|
||||
err := <-errs
|
||||
if err == nil {
|
||||
fin++
|
||||
if fin == 2 {
|
||||
return nil
|
||||
}
|
||||
} else {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
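Once the patcher above has populated the indices, querying them with the same olivere/elastic client looks roughly like the sketch below; it is not part of the patcher, and the match-query shape is an assumption.

// A minimal sketch of searching the "topics" index built above, using the
// same elastic.v6 client and the imports already present in the patcher.
func searchTopics(client *elastic.Client, term string) error {
	res, err := client.Search().
		Index("topics").
		Query(elastic.NewMatchQuery("title", term)).
		Size(10).
		Do(context.Background())
	if err != nil {
		return err
	}
	log.Printf("found %d topics", res.TotalHits())
	return nil
}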
@@ -1,70 +0,0 @@
|
||||
//go:build hookgen
|
||||
// +build hookgen
|
||||
|
||||
package main // import "git.tuxpa.in/a/gosora/hook_gen"
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"runtime/debug"
|
||||
"strings"
|
||||
|
||||
h "git.tuxpa.in/a/gosora/cmd/common_hook_gen"
|
||||
c "git.tuxpa.in/a/gosora/common"
|
||||
_ "git.tuxpa.in/a/gosora/extend"
|
||||
)
|
||||
|
||||
// TODO: Make sure all the errors in this file propagate upwards properly
|
||||
func main() {
|
||||
// Capture panics instead of closing the window at a superhuman speed before the user can read the message on Windows
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
fmt.Println(r)
|
||||
debug.PrintStack()
|
||||
}
|
||||
}()
|
||||
|
||||
hooks := make(map[string]int)
|
||||
for _, pl := range c.Plugins {
|
||||
if len(pl.Meta.Hooks) > 0 {
|
||||
for _, hook := range pl.Meta.Hooks {
|
||||
hooks[hook]++
|
||||
}
|
||||
continue
|
||||
}
|
||||
if pl.Init != nil {
|
||||
if e := pl.Init(pl); e != nil {
|
||||
log.Print("early plugin init err: ", e)
|
||||
return
|
||||
}
|
||||
}
|
||||
if pl.Hooks != nil {
|
||||
log.Print("Hooks not nil for ", pl.UName)
|
||||
for hook, _ := range pl.Hooks {
|
||||
hooks[hook] += 1
|
||||
}
|
||||
}
|
||||
}
|
||||
log.Printf("hooks: %+v\n", hooks)
|
||||
|
||||
imports := []string{"net/http"}
|
||||
hookVars := h.HookVars{imports, nil}
|
||||
var params2sb strings.Builder
|
||||
add := func(name, params, ret, htype string, multiHook, skip bool, defaultRet, pure string) {
|
||||
first := true
|
||||
for _, param := range strings.Split(params, ",") {
|
||||
if !first {
|
||||
params2sb.WriteRune(',')
|
||||
}
|
||||
pspl := strings.Split(strings.ReplaceAll(strings.TrimSpace(param), " ", " "), " ")
|
||||
params2sb.WriteString(pspl[0])
|
||||
first = false
|
||||
}
|
||||
hookVars.Hooks = append(hookVars.Hooks, h.Hook{name, params, params2sb.String(), ret, htype, hooks[name] > 0, multiHook, skip, defaultRet, pure})
|
||||
params2sb.Reset()
|
||||
}
|
||||
|
||||
h.AddHooks(add)
|
||||
h.Write(hookVars)
|
||||
log.Println("Successfully generated the hooks")
|
||||
}
|
@ -1,41 +0,0 @@
|
||||
package main // import "git.tuxpa.in/a/gosora/hook_stub_gen"
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"runtime/debug"
|
||||
"strings"
|
||||
|
||||
h "git.tuxpa.in/a/gosora/cmd/common_hook_gen"
|
||||
)
|
||||
|
||||
// TODO: Make sure all the errors in this file propagate upwards properly
|
||||
func main() {
|
||||
// Capture panics instead of closing the window at a superhuman speed before the user can read the message on Windows
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
fmt.Println(r)
|
||||
debug.PrintStack()
|
||||
}
|
||||
}()
|
||||
|
||||
imports := []string{"net/http"}
|
||||
hookVars := h.HookVars{imports,nil}
|
||||
add := func(name, params, ret, htype string, multiHook, skip bool, defaultRet, pure string) {
|
||||
var params2 string
|
||||
first := true
|
||||
for _, param := range strings.Split(params,",") {
|
||||
if !first {
|
||||
params2 += ","
|
||||
}
|
||||
pspl := strings.Split(strings.ReplaceAll(strings.TrimSpace(param)," "," ")," ")
|
||||
params2 += pspl[0]
|
||||
first = false
|
||||
}
|
||||
hookVars.Hooks = append(hookVars.Hooks, h.Hook{name, params, params2, ret, htype, true, multiHook, skip, defaultRet,pure})
|
||||
}
|
||||
|
||||
h.AddHooks(add)
|
||||
h.Write(hookVars)
|
||||
log.Println("Successfully generated the hooks")
|
||||
}
|
@ -1,321 +0,0 @@
|
||||
/*
|
||||
*
|
||||
* Gosora Installer
|
||||
* Copyright Azareal 2017 - 2019
|
||||
*
|
||||
*/
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"runtime/debug"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"git.tuxpa.in/a/gosora/install"
|
||||
)
|
||||
|
||||
var scanner *bufio.Scanner
|
||||
|
||||
var siteShortName string
|
||||
var siteName string
|
||||
var siteURL string
|
||||
var serverPort string
|
||||
|
||||
var defaultAdapter = "mysql"
|
||||
var defaultHost = "localhost"
|
||||
var defaultUsername = "root"
|
||||
var defaultDbname = "gosora"
|
||||
var defaultSiteShortName = "SN"
|
||||
var defaultSiteName = "Site Name"
|
||||
var defaultsiteURL = "localhost"
|
||||
var defaultServerPort = "80" // 8080's a good one, if you're testing and don't want it to clash with port 80
|
||||
|
||||
func main() {
|
||||
// Capture panics instead of closing the window at a superhuman speed before the user can read the message on Windows
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
fmt.Println(r)
|
||||
debug.PrintStack()
|
||||
pressAnyKey()
|
||||
return
|
||||
}
|
||||
}()
|
||||
|
||||
scanner = bufio.NewScanner(os.Stdin)
|
||||
fmt.Println("Welcome to Gosora's Installer")
|
||||
fmt.Println("We're going to take you through a few steps to help you get started :)")
|
||||
adap, ok := handleDatabaseDetails()
|
||||
if !ok {
|
||||
err := scanner.Err()
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
} else {
|
||||
err = errors.New("Something went wrong!")
|
||||
}
|
||||
abortError(err)
|
||||
return
|
||||
}
|
||||
|
||||
if !getSiteDetails() {
|
||||
err := scanner.Err()
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
} else {
|
||||
err = errors.New("Something went wrong!")
|
||||
}
|
||||
abortError(err)
|
||||
return
|
||||
}
|
||||
|
||||
err := adap.InitDatabase()
|
||||
if err != nil {
|
||||
abortError(err)
|
||||
return
|
||||
}
|
||||
|
||||
err = adap.TableDefs()
|
||||
if err != nil {
|
||||
abortError(err)
|
||||
return
|
||||
}
|
||||
|
||||
err = adap.CreateAdmin()
|
||||
if err != nil {
|
||||
abortError(err)
|
||||
return
|
||||
}
|
||||
|
||||
err = adap.InitialData()
|
||||
if err != nil {
|
||||
abortError(err)
|
||||
return
|
||||
}
|
||||
|
||||
configContents := []byte(`{
|
||||
"Site": {
|
||||
"ShortName":"` + siteShortName + `",
|
||||
"Name":"` + siteName + `",
|
||||
"URL":"` + siteURL + `",
|
||||
"Port":"` + serverPort + `",
|
||||
"EnableSsl":false,
|
||||
"EnableEmails":false,
|
||||
"HasProxy":false,
|
||||
"Language": "english"
|
||||
},
|
||||
"Config": {
|
||||
"SslPrivkey": "",
|
||||
"SslFullchain": "",
|
||||
"SMTPServer": "",
|
||||
"SMTPUsername": "",
|
||||
"SMTPPassword": "",
|
||||
"SMTPPort": "25",
|
||||
|
||||
"MaxRequestSizeStr":"5MB",
|
||||
"UserCache":"static",
|
||||
"TopicCache":"static",
|
||||
"ReplyCache":"static",
|
||||
"UserCacheCapacity":180,
|
||||
"TopicCacheCapacity":400,
|
||||
"ReplyCacheCapacity":20,
|
||||
"DefaultPath":"/topics/",
|
||||
"DefaultGroup":3,
|
||||
"ActivationGroup":5,
|
||||
"StaffCSS":"staff_post",
|
||||
"DefaultForum":2,
|
||||
"MinifyTemplates":true,
|
||||
"BuildSlugs":true,
|
||||
"ServerCount":1,
|
||||
"Noavatar":"https://api.adorable.io/avatars/{width}/{id}.png",
|
||||
"ItemsPerPage":25
|
||||
},
|
||||
"Database": {
|
||||
"Adapter": "` + adap.Name() + `",
|
||||
"Host": "` + adap.DBHost() + `",
|
||||
"Username": "` + adap.DBUsername() + `",
|
||||
"Password": "` + adap.DBPassword() + `",
|
||||
"Dbname": "` + adap.DBName() + `",
|
||||
"Port": "` + adap.DBPort() + `",
|
||||
|
||||
"TestAdapter": "` + adap.Name() + `",
|
||||
"TestHost": "",
|
||||
"TestUsername": "",
|
||||
"TestPassword": "",
|
||||
"TestDbname": "",
|
||||
"TestPort": ""
|
||||
},
|
||||
"Dev": {
|
||||
"DebugMode":true,
|
||||
"SuperDebug":false
|
||||
}
|
||||
}`)
|
||||
|
||||
fmt.Println("Opening the configuration file")
|
||||
configFile, err := os.Create("./config/config.json")
|
||||
if err != nil {
|
||||
abortError(err)
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Println("Writing to the configuration file...")
|
||||
_, err = configFile.Write(configContents)
|
||||
if err != nil {
|
||||
abortError(err)
|
||||
return
|
||||
}
|
||||
|
||||
configFile.Sync()
|
||||
configFile.Close()
|
||||
fmt.Println("Finished writing to the configuration file")
|
||||
|
||||
fmt.Println("Yay, you have successfully installed Gosora!")
|
||||
fmt.Println("Your name is Admin and you can login with the password 'password'. Don't forget to change it! Seriously. It's really insecure.")
|
||||
pressAnyKey()
|
||||
}
|
||||
|
||||
func abortError(err error) {
|
||||
fmt.Println(err)
|
||||
fmt.Println("Aborting installation...")
|
||||
pressAnyKey()
|
||||
}
|
||||
|
||||
func handleDatabaseDetails() (adap install.InstallAdapter, ok bool) {
|
||||
var dbAdapter string
|
||||
var dbHost string
|
||||
var dbUsername string
|
||||
var dbPassword string
|
||||
var dbName string
|
||||
// TODO: Let the admin set the database port?
|
||||
//var dbPort string
|
||||
|
||||
for {
|
||||
fmt.Println("Which database adapter do you wish to use? mysql or mssql? Default: mysql")
|
||||
if !scanner.Scan() {
|
||||
return nil, false
|
||||
}
|
||||
dbAdapter := strings.TrimSpace(scanner.Text())
|
||||
if dbAdapter == "" {
|
||||
dbAdapter = defaultAdapter
|
||||
}
|
||||
adap, ok = install.Lookup(dbAdapter)
|
||||
if ok {
|
||||
break
|
||||
}
|
||||
fmt.Println("That adapter doesn't exist")
|
||||
}
|
||||
fmt.Println("Set database adapter to " + dbAdapter)
|
||||
|
||||
fmt.Println("Database Host? Default: " + defaultHost)
|
||||
if !scanner.Scan() {
|
||||
return nil, false
|
||||
}
|
||||
dbHost = scanner.Text()
|
||||
if dbHost == "" {
|
||||
dbHost = defaultHost
|
||||
}
|
||||
fmt.Println("Set database host to " + dbHost)
|
||||
|
||||
fmt.Println("Database Username? Default: " + defaultUsername)
|
||||
if !scanner.Scan() {
|
||||
return nil, false
|
||||
}
|
||||
dbUsername = scanner.Text()
|
||||
if dbUsername == "" {
|
||||
dbUsername = defaultUsername
|
||||
}
|
||||
fmt.Println("Set database username to " + dbUsername)
|
||||
|
||||
fmt.Println("Database Password? Default: ''")
|
||||
if !scanner.Scan() {
|
||||
return nil, false
|
||||
}
|
||||
dbPassword = scanner.Text()
|
||||
if len(dbPassword) == 0 {
|
||||
fmt.Println("You didn't set a password for this user. This won't block the installation process, but it might create security issues in the future.")
|
||||
fmt.Println("")
|
||||
} else {
|
||||
fmt.Println("Set password to " + obfuscatePassword(dbPassword))
|
||||
}
|
||||
|
||||
fmt.Println("Database Name? Pick a name you like or one provided to you. Default: " + defaultDbname)
|
||||
if !scanner.Scan() {
|
||||
return nil, false
|
||||
}
|
||||
dbName = scanner.Text()
|
||||
if dbName == "" {
|
||||
dbName = defaultDbname
|
||||
}
|
||||
fmt.Println("Set database name to " + dbName)
|
||||
|
||||
adap.SetConfig(dbHost, dbUsername, dbPassword, dbName, adap.DefaultPort())
|
||||
return adap, true
|
||||
}
|
||||
|
||||
func getSiteDetails() bool {
|
||||
fmt.Println("Okay. We also need to know some actual information about your site!")
|
||||
fmt.Println("What's your site's name? Default: " + defaultSiteName)
|
||||
if !scanner.Scan() {
|
||||
return false
|
||||
}
|
||||
siteName = scanner.Text()
|
||||
if siteName == "" {
|
||||
siteName = defaultSiteName
|
||||
}
|
||||
fmt.Println("Set the site name to " + siteName)
|
||||
|
||||
// ? - We could compute this based on the first letter of each word in the site's name, if it's name spans multiple words. I'm not sure how to do this for single word names.
|
||||
fmt.Println("Can we have a short abbreviation for your site? Default: " + defaultSiteShortName)
|
||||
if !scanner.Scan() {
|
||||
return false
|
||||
}
|
||||
siteShortName = scanner.Text()
|
||||
if siteShortName == "" {
|
||||
siteShortName = defaultSiteShortName
|
||||
}
|
||||
fmt.Println("Set the short name to " + siteShortName)
|
||||
|
||||
fmt.Println("What's your site's url? Default: " + defaultsiteURL)
|
||||
if !scanner.Scan() {
|
||||
return false
|
||||
}
|
||||
siteURL = scanner.Text()
|
||||
if siteURL == "" {
|
||||
siteURL = defaultsiteURL
|
||||
}
|
||||
fmt.Println("Set the site url to " + siteURL)
|
||||
|
||||
fmt.Println("What port do you want the server to listen on? If you don't know what this means, you should probably leave it on the default. Default: " + defaultServerPort)
|
||||
if !scanner.Scan() {
|
||||
return false
|
||||
}
|
||||
serverPort = scanner.Text()
|
||||
if serverPort == "" {
|
||||
serverPort = defaultServerPort
|
||||
}
|
||||
_, err := strconv.Atoi(serverPort)
|
||||
if err != nil {
|
||||
fmt.Println("That's not a valid number!")
|
||||
return false
|
||||
}
|
||||
fmt.Println("Set the server port to " + serverPort)
|
||||
return true
|
||||
}
|
||||
|
||||
func obfuscatePassword(password string) (out string) {
|
||||
for i := 0; i < len(password); i++ {
|
||||
out += "*"
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func pressAnyKey() {
|
||||
//fmt.Println("Press any key to exit...")
|
||||
fmt.Println("Please press enter to exit...")
|
||||
for scanner.Scan() {
|
||||
_ = scanner.Text()
|
||||
return
|
||||
}
|
||||
}
|
@ -1,408 +0,0 @@
|
||||
/* WIP Under Construction */
|
||||
package main // import "git.tuxpa.in/a/gosora/query_gen"
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"runtime/debug"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
c "git.tuxpa.in/a/gosora/common"
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
)
|
||||
|
||||
// TODO: Make sure all the errors in this file propagate upwards properly
|
||||
func main() {
|
||||
// Capture panics instead of closing the window at a superhuman speed before the user can read the message on Windows
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
fmt.Println(r)
|
||||
debug.PrintStack()
|
||||
return
|
||||
}
|
||||
}()
|
||||
|
||||
log.Println("Running the query generator")
|
||||
for _, a := range qgen.Registry {
|
||||
log.Printf("Building the queries for the %s adapter", a.GetName())
|
||||
qgen.Install.SetAdapterInstance(a)
|
||||
qgen.Install.AddPlugins(NewPrimaryKeySpitter()) // TODO: Do we really need to fill the spitter for every adapter?
|
||||
|
||||
e := writeStatements(a)
|
||||
if e != nil {
|
||||
log.Print(e)
|
||||
}
|
||||
e = qgen.Install.Write()
|
||||
if e != nil {
|
||||
log.Print(e)
|
||||
}
|
||||
e = a.Write()
|
||||
if e != nil {
|
||||
log.Print(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// nolint
|
||||
func writeStatements(a qgen.Adapter) (err error) {
|
||||
e := func(f func(qgen.Adapter) error) {
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
err = f(a)
|
||||
}
|
||||
e(createTables)
|
||||
e(seedTables)
|
||||
e(writeSelects)
|
||||
e(writeLeftJoins)
|
||||
e(writeInnerJoins)
|
||||
e(writeInserts)
|
||||
e(writeUpdates)
|
||||
e(writeDeletes)
|
||||
e(writeSimpleCounts)
|
||||
e(writeInsertSelects)
|
||||
e(writeInsertLeftJoins)
|
||||
e(writeInsertInnerJoins)
|
||||
return err
|
||||
}
|
||||
|
||||
type si = map[string]interface{}
|
||||
type tK = tblKey
|
||||
|
||||
func seedTables(a qgen.Adapter) error {
|
||||
qgen.Install.AddIndex("topics", "parentID", "parentID")
|
||||
qgen.Install.AddIndex("replies", "tid", "tid")
|
||||
qgen.Install.AddIndex("polls", "parentID", "parentID")
|
||||
qgen.Install.AddIndex("likes", "targetItem", "targetItem")
|
||||
qgen.Install.AddIndex("emails", "uid", "uid")
|
||||
qgen.Install.AddIndex("attachments", "originID", "originID")
|
||||
qgen.Install.AddIndex("attachments", "path", "path")
|
||||
qgen.Install.AddIndex("activity_stream_matches", "watcher", "watcher")
|
||||
// TODO: Remove these keys to save space when Elasticsearch is active?
|
||||
//qgen.Install.AddKey("topics", "title", tK{"title", "fulltext", "", false})
|
||||
//qgen.Install.AddKey("topics", "content", tK{"content", "fulltext", "", false})
|
||||
//qgen.Install.AddKey("topics", "title,content", tK{"title,content", "fulltext", "", false})
|
||||
//qgen.Install.AddKey("replies", "content", tK{"content", "fulltext", "", false})
|
||||
|
||||
insert := func(tbl, cols, vals string) {
|
||||
qgen.Install.SimpleInsert(tbl, cols, vals)
|
||||
}
|
||||
insert("sync", "last_update", "UTC_TIMESTAMP()")
|
||||
addSetting := func(name, content, stype string, constraints ...string) {
|
||||
if strings.Contains(name, "'") {
|
||||
panic("name contains '")
|
||||
}
|
||||
if strings.Contains(stype, "'") {
|
||||
panic("stype contains '")
|
||||
}
|
||||
// TODO: Add more field validators
|
||||
cols := "name,content,type"
|
||||
if len(constraints) > 0 {
|
||||
cols += ",constraints"
|
||||
}
|
||||
q := func(s string) string {
|
||||
return "'" + s + "'"
|
||||
}
|
||||
c := func() string {
|
||||
if len(constraints) == 0 {
|
||||
return ""
|
||||
}
|
||||
return "," + q(constraints[0])
|
||||
}
|
||||
insert("settings", cols, q(name)+","+q(content)+","+q(stype)+c())
|
||||
}
|
||||
addSetting("activation_type", "1", "list", "1-3")
|
||||
addSetting("bigpost_min_words", "250", "int")
|
||||
addSetting("megapost_min_words", "1000", "int")
|
||||
addSetting("meta_desc", "", "html-attribute")
|
||||
addSetting("rapid_loading", "1", "bool")
|
||||
addSetting("google_site_verify", "", "html-attribute")
|
||||
addSetting("avatar_visibility", "0", "list", "0-1")
|
||||
insert("themes", "uname, default", "'cosora',1")
|
||||
insert("emails", "email, uid, validated", "'admin@localhost',1,1") // ? - Use a different default email or let the admin input it during installation?
|
||||
|
||||
/*
|
||||
The Permissions:
|
||||
|
||||
Global Permissions:
|
||||
BanUsers
|
||||
ActivateUsers
|
||||
EditUser
|
||||
EditUserEmail
|
||||
EditUserPassword
|
||||
EditUserGroup
|
||||
EditUserGroupSuperMod
|
||||
EditUserGroupAdmin
|
||||
EditGroup
|
||||
EditGroupLocalPerms
|
||||
EditGroupGlobalPerms
|
||||
EditGroupSuperMod
|
||||
EditGroupAdmin
|
||||
ManageForums
|
||||
EditSettings
|
||||
ManageThemes
|
||||
ManagePlugins
|
||||
ViewAdminLogs
|
||||
ViewIPs
|
||||
|
||||
Non-staff Global Permissions:
|
||||
UploadFiles
|
||||
UploadAvatars
|
||||
UseConvos
|
||||
UseConvosOnlyWithMod
|
||||
CreateProfileReply
|
||||
AutoEmbed
|
||||
AutoLink
|
||||
// CreateConvo ?
|
||||
// CreateConvoReply ?
|
||||
|
||||
Forum Permissions:
|
||||
ViewTopic
|
||||
LikeItem
|
||||
CreateTopic
|
||||
EditTopic
|
||||
DeleteTopic
|
||||
CreateReply
|
||||
EditReply
|
||||
DeleteReply
|
||||
PinTopic
|
||||
CloseTopic
|
||||
MoveTopic
|
||||
*/
|
||||
|
||||
p := func(perms *c.Perms) string {
|
||||
jBytes, err := json.Marshal(perms)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return string(jBytes)
|
||||
}
|
||||
addGroup := func(name string, perms c.Perms, mod, admin, banned bool, tag string) {
|
||||
mi, ai, bi := "0", "0", "0"
|
||||
if mod {
|
||||
mi = "1"
|
||||
}
|
||||
if admin {
|
||||
ai = "1"
|
||||
}
|
||||
if banned {
|
||||
bi = "1"
|
||||
}
|
||||
insert("users_groups", "name, permissions, plugin_perms, is_mod, is_admin, is_banned, tag", `'`+name+`','`+p(&perms)+`','{}',`+mi+`,`+ai+`,`+bi+`,"`+tag+`"`)
|
||||
}
|
||||
|
||||
perms := c.AllPerms
|
||||
perms.EditUserGroupAdmin = false
|
||||
perms.EditGroupAdmin = false
|
||||
addGroup("Administrator", perms, true, true, false, "Admin")
|
||||
|
||||
perms = c.Perms{BanUsers: true, ActivateUsers: true, EditUser: true, EditUserEmail: false, EditUserGroup: true, ViewIPs: true, UploadFiles: true, UploadAvatars: true, UseConvos: true, UseConvosOnlyWithMod: true, CreateProfileReply: true, AutoEmbed: true, AutoLink: true, ViewTopic: true, LikeItem: true, CreateTopic: true, EditTopic: true, DeleteTopic: true, CreateReply: true, EditReply: true, DeleteReply: true, PinTopic: true, CloseTopic: true, MoveTopic: true}
|
||||
addGroup("Moderator", perms, true, false, false, "Mod")
|
||||
|
||||
perms = c.Perms{UploadFiles: true, UploadAvatars: true, UseConvos: true, UseConvosOnlyWithMod: true, CreateProfileReply: true, AutoEmbed: true, AutoLink: true, ViewTopic: true, LikeItem: true, CreateTopic: true, CreateReply: true}
|
||||
addGroup("Member", perms, false, false, false, "")
|
||||
|
||||
perms = c.Perms{ViewTopic: true}
|
||||
addGroup("Banned", perms, false, false, true, "")
|
||||
addGroup("Awaiting Activation", c.Perms{ViewTopic: true, UseConvosOnlyWithMod: true}, false, false, false, "")
|
||||
addGroup("Not Loggedin", perms, false, false, false, "Guest")
|
||||
|
||||
//
|
||||
// TODO: Stop processFields() from stripping the spaces in the descriptions in the next commit
|
||||
|
||||
insert("forums", "name, active, desc, tmpl", "'Reports',0,'All the reports go here',''")
|
||||
insert("forums", "name, lastTopicID, lastReplyerID, desc, tmpl", "'General',1,1,'A place for general discussions which don't fit elsewhere',''")
|
||||
|
||||
//
|
||||
|
||||
/*var addForumPerm = func(gid, fid int, permStr string) {
|
||||
insert("forums_permissions", "gid, fid, permissions", strconv.Itoa(gid)+`,`+strconv.Itoa(fid)+`,'`+permStr+`'`)
|
||||
}*/
|
||||
|
||||
insert("forums_permissions", "gid, fid, permissions", `1,1,'{"ViewTopic":true,"CreateReply":true,"CreateTopic":true,"PinTopic":true,"CloseTopic":true}'`)
|
||||
insert("forums_permissions", "gid, fid, permissions", `2,1,'{"ViewTopic":true,"CreateReply":true,"CloseTopic":true}'`)
|
||||
insert("forums_permissions", "gid, fid, permissions", "3,1,'{}'")
|
||||
insert("forums_permissions", "gid, fid, permissions", "4,1,'{}'")
|
||||
insert("forums_permissions", "gid, fid, permissions", "5,1,'{}'")
|
||||
insert("forums_permissions", "gid, fid, permissions", "6,1,'{}'")
|
||||
|
||||
//
|
||||
|
||||
insert("forums_permissions", "gid, fid, permissions", `1,2,'{"ViewTopic":true,"CreateReply":true,"CreateTopic":true,"LikeItem":true,"EditTopic":true,"DeleteTopic":true,"EditReply":true,"DeleteReply":true,"PinTopic":true,"CloseTopic":true,"MoveTopic":true}'`)
|
||||
|
||||
insert("forums_permissions", "gid, fid, permissions", `2,2,'{"ViewTopic":true,"CreateReply":true,"CreateTopic":true,"LikeItem":true,"EditTopic":true,"DeleteTopic":true,"EditReply":true,"DeleteReply":true,"PinTopic":true,"CloseTopic":true,"MoveTopic":true}'`)
|
||||
|
||||
insert("forums_permissions", "gid, fid, permissions", `3,2,'{"ViewTopic":true,"CreateReply":true,"CreateTopic":true,"LikeItem":true}'`)
|
||||
|
||||
insert("forums_permissions", "gid, fid, permissions", `4,2,'{"ViewTopic":true}'`)
|
||||
|
||||
insert("forums_permissions", "gid, fid, permissions", `5,2,'{"ViewTopic":true}'`)
|
||||
|
||||
insert("forums_permissions", "gid, fid, permissions", `6,2,'{"ViewTopic":true}'`)
|
||||
|
||||
//
|
||||
|
||||
insert("topics", "title, content, parsed_content, createdAt, lastReplyAt, lastReplyBy, createdBy, parentID, ip", "'Test Topic','A topic automatically generated by the software.','A topic automatically generated by the software.',UTC_TIMESTAMP(),UTC_TIMESTAMP(),1,1,2,''")
|
||||
|
||||
insert("replies", "tid, content, parsed_content, createdAt, createdBy, lastUpdated, lastEdit, lastEditBy, ip", "1,'A reply!','A reply!',UTC_TIMESTAMP(),1,UTC_TIMESTAMP(),0,0,''")
|
||||
|
||||
insert("menus", "", "")
|
||||
|
||||
// Go maps have a random iteration order, so we have to do this, otherwise the schema files will become unstable and harder to audit
|
||||
order := 0
|
||||
mOrder := "mid, name, htmlID, cssClass, position, path, aria, tooltip, guestOnly, memberOnly, staffOnly, adminOnly"
|
||||
addMenuItem := func(data map[string]interface{}) {
|
||||
if data["mid"] == nil {
|
||||
data["mid"] = 1
|
||||
}
|
||||
if data["position"] == nil {
|
||||
data["position"] = "left"
|
||||
}
|
||||
cols, values := qgen.InterfaceMapToInsertStrings(data, mOrder)
|
||||
insert("menu_items", cols+", order", values+","+strconv.Itoa(order))
|
||||
order++
|
||||
}
|
||||
|
||||
addMenuItem(si{"name": "{lang.menu_forums}", "htmlID": "menu_forums", "path": "/forums/", "aria": "{lang.menu_forums_aria}", "tooltip": "{lang.menu_forums_tooltip}"})
|
||||
|
||||
addMenuItem(si{"name": "{lang.menu_topics}", "htmlID": "menu_topics", "cssClass": "menu_topics", "path": "/topics/", "aria": "{lang.menu_topics_aria}", "tooltip": "{lang.menu_topics_tooltip}"})
|
||||
|
||||
addMenuItem(si{"htmlID": "general_alerts", "cssClass": "menu_alerts", "position": "right", "tmplName": "menu_alerts"})
|
||||
|
||||
addMenuItem(si{"name": "{lang.menu_account}", "cssClass": "menu_account", "path": "/user/edit/", "aria": "{lang.menu_account_aria}", "tooltip": "{lang.menu_account_tooltip}", "memberOnly": true})
|
||||
|
||||
addMenuItem(si{"name": "{lang.menu_profile}", "cssClass": "menu_profile", "path": "{me.Link}", "aria": "{lang.menu_profile_aria}", "tooltip": "{lang.menu_profile_tooltip}", "memberOnly": true})
|
||||
|
||||
addMenuItem(si{"name": "{lang.menu_panel}", "cssClass": "menu_panel menu_account", "path": "/panel/", "aria": "{lang.menu_panel_aria}", "tooltip": "{lang.menu_panel_tooltip}", "memberOnly": true, "staffOnly": true})
|
||||
|
||||
addMenuItem(si{"name": "{lang.menu_logout}", "cssClass": "menu_logout", "path": "/accounts/logout/?s={me.Session}", "aria": "{lang.menu_logout_aria}", "tooltip": "{lang.menu_logout_tooltip}", "memberOnly": true})
|
||||
|
||||
addMenuItem(si{"name": "{lang.menu_register}", "cssClass": "menu_register", "path": "/accounts/create/", "aria": "{lang.menu_register_aria}", "tooltip": "{lang.menu_register_tooltip}", "guestOnly": true})
|
||||
|
||||
addMenuItem(si{"name": "{lang.menu_login}", "cssClass": "menu_login", "path": "/accounts/login/", "aria": "{lang.menu_login_aria}", "tooltip": "{lang.menu_login_tooltip}", "guestOnly": true})
|
||||
|
||||
/*var fSet []string
|
||||
for _, table := range tables {
|
||||
fSet = append(fSet, "'"+table+"'")
|
||||
}
|
||||
qgen.Install.SimpleBulkInsert("tables", "name", fSet)*/
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// ? - What is this for?
|
||||
/*func copyInsertMap(in map[string]interface{}) (out map[string]interface{}) {
|
||||
out = make(map[string]interface{})
|
||||
for col, value := range in {
|
||||
out[col] = value
|
||||
}
|
||||
return out
|
||||
}*/
|
||||
|
||||
type LitStr string
|
||||
|
||||
func writeSelects(a qgen.Adapter) error {
|
||||
b := a.Builder()
|
||||
|
||||
// Looking for getTopic? Your statement is in another castle
|
||||
|
||||
//b.Select("isPluginInstalled").Table("plugins").Columns("installed").Where("uname = ?").Parse()
|
||||
|
||||
b.Select("forumEntryExists").Table("forums").Columns("fid").Where("name = ''").Orderby("fid ASC").Limit("0,1").Parse()
|
||||
|
||||
b.Select("groupEntryExists").Table("users_groups").Columns("gid").Where("name = ''").Orderby("gid ASC").Limit("0,1").Parse()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func writeLeftJoins(a qgen.Adapter) error {
|
||||
a.SimpleLeftJoin("getForumTopics", "topics", "users", "topics.tid, topics.title, topics.content, topics.createdBy, topics.is_closed, topics.sticky, topics.createdAt, topics.lastReplyAt, topics.parentID, users.name, users.avatar", "topics.createdBy = users.uid", "topics.parentID = ?", "topics.sticky DESC, topics.lastReplyAt DESC, topics.createdBy desc", "")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func writeInnerJoins(a qgen.Adapter) (err error) {
|
||||
return nil
|
||||
}
|
||||
|
||||
func writeInserts(a qgen.Adapter) error {
|
||||
b := a.Builder()
|
||||
|
||||
b.Insert("addForumPermsToForum").Table("forums_permissions").Columns("gid,fid,preset,permissions").Fields("?,?,?,?").Parse()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func writeUpdates(a qgen.Adapter) error {
|
||||
b := a.Builder()
|
||||
|
||||
b.Update("updateEmail").Table("emails").Set("email = ?, uid = ?, validated = ?, token = ?").Where("email = ?").Parse()
|
||||
|
||||
b.Update("setTempGroup").Table("users").Set("temp_group = ?").Where("uid = ?").Parse()
|
||||
|
||||
b.Update("bumpSync").Table("sync").Set("last_update = UTC_TIMESTAMP()").Parse()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func writeDeletes(a qgen.Adapter) error {
|
||||
b := a.Builder()
|
||||
|
||||
//b.Delete("deleteForumPermsByForum").Table("forums_permissions").Where("fid=?").Parse()
|
||||
|
||||
b.Delete("deleteActivityStreamMatch").Table("activity_stream_matches").Where("watcher=? AND asid=?").Parse()
|
||||
//b.Delete("deleteActivityStreamMatchesByWatcher").Table("activity_stream_matches").Where("watcher=?").Parse()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func writeSimpleCounts(a qgen.Adapter) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func writeInsertSelects(a qgen.Adapter) error {
|
||||
/*a.SimpleInsertSelect("addForumPermsToForumAdmins",
|
||||
qgen.DB_Insert{"forums_permissions", "gid, fid, preset, permissions", ""},
|
||||
qgen.DB_Select{"users_groups", "gid, ? AS fid, ? AS preset, ? AS permissions", "is_admin = 1", "", ""},
|
||||
)*/
|
||||
|
||||
/*a.SimpleInsertSelect("addForumPermsToForumStaff",
|
||||
qgen.DB_Insert{"forums_permissions", "gid, fid, preset, permissions", ""},
|
||||
qgen.DB_Select{"users_groups", "gid, ? AS fid, ? AS preset, ? AS permissions", "is_admin = 0 AND is_mod = 1", "", ""},
|
||||
)*/
|
||||
|
||||
/*a.SimpleInsertSelect("addForumPermsToForumMembers",
|
||||
qgen.DB_Insert{"forums_permissions", "gid, fid, preset, permissions", ""},
|
||||
qgen.DB_Select{"users_groups", "gid, ? AS fid, ? AS preset, ? AS permissions", "is_admin = 0 AND is_mod = 0 AND is_banned = 0", "", ""},
|
||||
)*/
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// nolint
|
||||
func writeInsertLeftJoins(a qgen.Adapter) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func writeInsertInnerJoins(a qgen.Adapter) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func writeFile(name, content string) (err error) {
|
||||
f, err := os.Create(name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = f.WriteString(content)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = f.Sync()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return f.Close()
|
||||
}
|
@ -1,48 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
)
|
||||
|
||||
type PrimaryKeySpitter struct {
|
||||
keys map[string]string
|
||||
}
|
||||
|
||||
func NewPrimaryKeySpitter() *PrimaryKeySpitter {
|
||||
return &PrimaryKeySpitter{make(map[string]string)}
|
||||
}
|
||||
|
||||
func (spit *PrimaryKeySpitter) Hook(name string, args ...interface{}) error {
|
||||
if name == "CreateTableStart" {
|
||||
var found string
|
||||
var table = args[0].(*qgen.DBInstallTable)
|
||||
for _, key := range table.Keys {
|
||||
if key.Type == "primary" {
|
||||
expl := strings.Split(key.Columns, ",")
|
||||
if len(expl) > 1 {
|
||||
continue
|
||||
}
|
||||
found = key.Columns
|
||||
}
|
||||
if found != "" {
|
||||
table := table.Name
|
||||
spit.keys[table] = found
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (spit *PrimaryKeySpitter) Write() error {
|
||||
out := `// Generated by Gosora's Query Generator. DO NOT EDIT.
|
||||
package main
|
||||
|
||||
var dbTablePrimaryKeys = map[string]string{
|
||||
`
|
||||
for table, key := range spit.keys {
|
||||
out += "\t\"" + table + "\":\"" + key + "\",\n"
|
||||
}
|
||||
return writeFile("./gen_tables.go", out+"}\n")
|
||||
}
|
@ -1,831 +0,0 @@
|
||||
package main
|
||||
|
||||
import qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
|
||||
var mysqlPre = "utf8mb4"
|
||||
var mysqlCol = "utf8mb4_general_ci"
|
||||
|
||||
var tables []string
|
||||
|
||||
type tblColumn = qgen.DBTableColumn
|
||||
type tC = tblColumn
|
||||
type tblKey = qgen.DBTableKey
|
||||
|
||||
func createTables(a qgen.Adapter) error {
|
||||
tables = nil
|
||||
f := func(table, charset, collation string, cols []tC, keys []tblKey) error {
|
||||
tables = append(tables, table)
|
||||
return qgen.Install.CreateTable(table, charset, collation, cols, keys)
|
||||
}
|
||||
return createTables2(a, f)
|
||||
}
|
||||
|
||||
func createTables2(a qgen.Adapter, f func(table, charset, collation string, columns []tC, keys []tblKey) error) (err error) {
|
||||
createTable := func(table, charset, collation string, cols []tC, keys []tblKey) {
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
err = f(table, charset, collation, cols, keys)
|
||||
}
|
||||
bcol := func(col string, val bool) qgen.DBTableColumn {
|
||||
if val {
|
||||
return tC{col, "boolean", 0, false, false, "1"}
|
||||
}
|
||||
return tC{col, "boolean", 0, false, false, "0"}
|
||||
}
|
||||
ccol := func(col string, size int, sdefault string) qgen.DBTableColumn {
|
||||
return tC{col, "varchar", size, false, false, sdefault}
|
||||
}
|
||||
text := func(params ...string) qgen.DBTableColumn {
|
||||
if len(params) == 0 {
|
||||
return tC{"", "text", 0, false, false, ""}
|
||||
}
|
||||
col, sdefault := params[0], ""
|
||||
if len(params) > 1 {
|
||||
sdefault = params[1]
|
||||
if sdefault == "" {
|
||||
sdefault = "''"
|
||||
}
|
||||
}
|
||||
return tC{col, "text", 0, false, false, sdefault}
|
||||
}
|
||||
createdAt := func(coll ...string) qgen.DBTableColumn {
|
||||
var col string
|
||||
if len(coll) > 0 {
|
||||
col = coll[0]
|
||||
}
|
||||
if col == "" {
|
||||
col = "createdAt"
|
||||
}
|
||||
return tC{col, "createdAt", 0, false, false, ""}
|
||||
}
|
||||
|
||||
createTable("users", mysqlPre, mysqlCol,
|
||||
[]tC{
|
||||
{"uid", "int", 0, false, true, ""},
|
||||
ccol("name", 100, ""),
|
||||
ccol("password", 100, ""),
|
||||
|
||||
ccol("salt", 80, "''"),
|
||||
{"group", "int", 0, false, false, ""}, // TODO: Make this a foreign key
|
||||
bcol("active", false),
|
||||
bcol("is_super_admin", false),
|
||||
createdAt(),
|
||||
{"lastActiveAt", "datetime", 0, false, false, ""},
|
||||
ccol("session", 200, "''"),
|
||||
//ccol("authToken", 200, "''"),
|
||||
ccol("last_ip", 200, "''"),
|
||||
{"profile_comments", "int", 0, false, false, "0"},
|
||||
{"who_can_convo", "int", 0, false, false, "0"},
|
||||
{"enable_embeds", "int", 0, false, false, "-1"},
|
||||
ccol("email", 200, "''"),
|
||||
ccol("avatar", 100, "''"),
|
||||
text("message"),
|
||||
|
||||
// TODO: Drop these columns?
|
||||
ccol("url_prefix", 20, "''"),
|
||||
ccol("url_name", 100, "''"),
|
||||
//text("pub_key"),
|
||||
|
||||
{"level", "smallint", 0, false, false, "0"},
|
||||
{"score", "int", 0, false, false, "0"},
|
||||
{"posts", "int", 0, false, false, "0"},
|
||||
{"bigposts", "int", 0, false, false, "0"},
|
||||
{"megaposts", "int", 0, false, false, "0"},
|
||||
{"topics", "int", 0, false, false, "0"},
|
||||
{"liked", "int", 0, false, false, "0"},
|
||||
|
||||
// These two are to bound liked queries with little bits of information we know about the user to reduce the server load
|
||||
{"oldestItemLikedCreatedAt", "datetime", 0, false, false, ""}, // For internal use only, semantics may change
|
||||
{"lastLiked", "datetime", 0, false, false, ""}, // For internal use only, semantics may change
|
||||
|
||||
//{"penalty_count","int",0,false,false,"0"},
|
||||
{"temp_group", "int", 0, false, false, "0"}, // For temporary groups, set this to zero when a temporary group isn't in effect
|
||||
},
|
||||
[]tK{
|
||||
{"uid", "primary", "", false},
|
||||
{"name", "unique", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("users_groups", mysqlPre, mysqlCol,
|
||||
[]tC{
|
||||
{"gid", "int", 0, false, true, ""},
|
||||
ccol("name", 100, ""),
|
||||
text("permissions"),
|
||||
text("plugin_perms"),
|
||||
bcol("is_mod", false),
|
||||
bcol("is_admin", false),
|
||||
bcol("is_banned", false),
|
||||
{"user_count", "int", 0, false, false, "0"}, // TODO: Implement this
|
||||
|
||||
ccol("tag", 50, "''"),
|
||||
},
|
||||
[]tK{
|
||||
{"gid", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("users_groups_promotions", mysqlPre, mysqlCol,
|
||||
[]tC{
|
||||
{"pid", "int", 0, false, true, ""},
|
||||
{"from_gid", "int", 0, false, false, ""},
|
||||
{"to_gid", "int", 0, false, false, ""},
|
||||
bcol("two_way", false), // If a user no longer meets the requirements for this promotion then they will be demoted if this flag is set
|
||||
|
||||
// Requirements
|
||||
{"level", "int", 0, false, false, ""},
|
||||
{"posts", "int", 0, false, false, "0"},
|
||||
{"minTime", "int", 0, false, false, ""}, // How long someone needs to have been in their current group before being promoted
|
||||
{"registeredFor", "int", 0, false, false, "0"}, // minutes
|
||||
},
|
||||
[]tK{
|
||||
{"pid", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
/*
|
||||
createTable("users_groups_promotions_scheduled","","",
|
||||
[]tC{
|
||||
{"prid","int",0,false,false,""},
|
||||
{"uid","int",0,false,false,""},
|
||||
{"runAt","datetime",0,false,false,""},
|
||||
},
|
||||
[]tK{
|
||||
// TODO: Test to see that the compound primary key works
|
||||
{"prid,uid", "primary", "", false},
|
||||
},
|
||||
)
|
||||
*/
|
||||
|
||||
createTable("users_2fa_keys", mysqlPre, mysqlCol,
|
||||
[]tC{
|
||||
{"uid", "int", 0, false, false, ""},
|
||||
ccol("secret", 100, ""),
|
||||
ccol("scratch1", 50, ""),
|
||||
ccol("scratch2", 50, ""),
|
||||
ccol("scratch3", 50, ""),
|
||||
ccol("scratch4", 50, ""),
|
||||
ccol("scratch5", 50, ""),
|
||||
ccol("scratch6", 50, ""),
|
||||
ccol("scratch7", 50, ""),
|
||||
ccol("scratch8", 50, ""),
|
||||
{"createdAt", "createdAt", 0, false, false, ""},
|
||||
},
|
||||
[]tK{
|
||||
{"uid", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
// What should we do about global penalties? Put them on the users table for speed? Or keep them here?
|
||||
// Should we add IP Penalties? No, that's a stupid idea, just implement IP Bans properly. What about shadowbans?
|
||||
// TODO: Perm overrides
|
||||
// TODO: Add a mod-queue and other basic auto-mod features. This is needed for awaiting activation and the mod_queue penalty flag
|
||||
// TODO: Add a penalty type where a user is stopped from creating plugin_guilds social groups
|
||||
// TODO: Shadow bans. We will probably have a CanShadowBan permission for this, as we *really* don't want people using this lightly.
|
||||
/*createTable("users_penalties","","",
|
||||
[]tC{
|
||||
{"uid","int",0,false,false,""},
|
||||
{"element_id","int",0,false,false,""},
|
||||
ccol("element_type",50,""), //forum, profile?, and social_group. Leave blank for global.
|
||||
text("overrides","{}"),
|
||||
|
||||
bcol("mod_queue",false),
|
||||
bcol("shadow_ban",false),
|
||||
bcol("no_avatar",false), // Coming Soon. Should this be a perm override instead?
|
||||
|
||||
// Do we *really* need rate-limit penalty types? Are we going to be allowing bots or something?
|
||||
//{"posts_per_hour","int",0,false,false,"0"},
|
||||
//{"topics_per_hour","int",0,false,false,"0"},
|
||||
//{"posts_count","int",0,false,false,"0"},
|
||||
//{"topic_count","int",0,false,false,"0"},
|
||||
//{"last_hour","int",0,false,false,"0"}, // UNIX Time, as we don't need to do anything too fancy here. When an hour has elapsed since that time, reset the hourly penalty counters.
|
||||
|
||||
{"issued_by","int",0,false,false,""},
|
||||
createdAt("issued_at"),
|
||||
{"expires_at","datetime",0,false,false,""},
|
||||
}, nil,
|
||||
)*/
|
||||
|
||||
createTable("users_groups_scheduler", "", "",
|
||||
[]tC{
|
||||
{"uid", "int", 0, false, false, ""},
|
||||
{"set_group", "int", 0, false, false, ""},
|
||||
|
||||
{"issued_by", "int", 0, false, false, ""},
|
||||
createdAt("issued_at"),
|
||||
{"revert_at", "datetime", 0, false, false, ""},
|
||||
{"temporary", "boolean", 0, false, false, ""}, // special case for permanent bans to do the necessary bookkeeping, might be removed in the future
|
||||
},
|
||||
[]tK{
|
||||
{"uid", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
// TODO: Can we use a piece of software dedicated to persistent queues for this rather than relying on the database for it?
|
||||
createTable("users_avatar_queue", "", "",
|
||||
[]tC{
|
||||
{"uid", "int", 0, false, false, ""}, // TODO: Make this a foreign key
|
||||
},
|
||||
[]tK{
|
||||
{"uid", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
// TODO: Should we add a users prefix to this table to fit the "unofficial convention"?
|
||||
// TODO: Add an autoincrement key?
|
||||
createTable("emails", "", "",
|
||||
[]tC{
|
||||
ccol("email", 200, ""),
|
||||
{"uid", "int", 0, false, false, ""}, // TODO: Make this a foreign key
|
||||
bcol("validated", false),
|
||||
ccol("token", 200, "''"),
|
||||
}, nil,
|
||||
)
|
||||
|
||||
// TODO: Allow for patterns in domains, if the bots try to shake things up there?
|
||||
/*
|
||||
createTable("email_domain_blacklist", "", "",
|
||||
[]tC{
|
||||
ccol("domain", 200, ""),
|
||||
bcol("gtld", false),
|
||||
},
|
||||
[]tK{
|
||||
{"domain", "primary"},
|
||||
},
|
||||
)
|
||||
*/
|
||||
|
||||
// TODO: Implement password resets
|
||||
createTable("password_resets", "", "",
|
||||
[]tC{
|
||||
ccol("email", 200, ""),
|
||||
{"uid", "int", 0, false, false, ""}, // TODO: Make this a foreign key
|
||||
ccol("validated", 200, ""), // Token given once the one-use token is consumed, used to prevent multiple people consuming the same one-use token
|
||||
ccol("token", 200, ""),
|
||||
createdAt(),
|
||||
}, nil,
|
||||
)
|
||||
|
||||
createTable("forums", mysqlPre, mysqlCol,
|
||||
[]tC{
|
||||
{"fid", "int", 0, false, true, ""},
|
||||
ccol("name", 100, ""),
|
||||
ccol("desc", 200, ""),
|
||||
ccol("tmpl", 200, "''"),
|
||||
bcol("active", true),
|
||||
{"order", "int", 0, false, false, "0"},
|
||||
{"topicCount", "int", 0, false, false, "0"},
|
||||
ccol("preset", 100, "''"),
|
||||
{"parentID", "int", 0, false, false, "0"},
|
||||
ccol("parentType", 50, "''"),
|
||||
{"lastTopicID", "int", 0, false, false, "0"},
|
||||
{"lastReplyerID", "int", 0, false, false, "0"},
|
||||
},
|
||||
[]tK{
|
||||
{"fid", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("forums_permissions", "", "",
|
||||
[]tC{
|
||||
{"fid", "int", 0, false, false, ""},
|
||||
{"gid", "int", 0, false, false, ""},
|
||||
ccol("preset", 100, "''"),
|
||||
text("permissions", "{}"),
|
||||
},
|
||||
[]tK{
|
||||
// TODO: Test to see that the compound primary key works
|
||||
{"fid,gid", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("topics", mysqlPre, mysqlCol,
|
||||
[]tC{
|
||||
{"tid", "int", 0, false, true, ""},
|
||||
ccol("title", 100, ""), // TODO: Increase the max length to 200?
|
||||
text("content"),
|
||||
text("parsed_content"),
|
||||
createdAt(),
|
||||
{"lastReplyAt", "datetime", 0, false, false, ""},
|
||||
{"lastReplyBy", "int", 0, false, false, ""},
|
||||
{"lastReplyID", "int", 0, false, false, "0"},
|
||||
{"createdBy", "int", 0, false, false, ""}, // TODO: Make this a foreign key
|
||||
bcol("is_closed", false),
|
||||
bcol("sticky", false),
|
||||
// TODO: Add an index for this
|
||||
{"parentID", "int", 0, false, false, "2"},
|
||||
ccol("ip", 200, "''"),
|
||||
{"postCount", "int", 0, false, false, "1"},
|
||||
{"likeCount", "int", 0, false, false, "0"},
|
||||
{"attachCount", "int", 0, false, false, "0"},
|
||||
{"words", "int", 0, false, false, "0"},
|
||||
{"views", "int", 0, false, false, "0"},
|
||||
//{"dayViews", "int", 0, false, false, "0"},
|
||||
{"weekEvenViews", "int", 0, false, false, "0"},
|
||||
{"weekOddViews", "int", 0, false, false, "0"},
|
||||
///{"weekViews", "int", 0, false, false, "0"},
|
||||
///{"lastWeekViews", "int", 0, false, false, "0"},
|
||||
//{"monthViews", "int", 0, false, false, "0"},
|
||||
// ? - A little hacky, maybe we could do something less likely to bite us with huge numbers of topics?
|
||||
// TODO: Add an index for this?
|
||||
//{"lastMonth", "datetime", 0, false, false, ""},
|
||||
ccol("css_class", 100, "''"),
|
||||
{"poll", "int", 0, false, false, "0"},
|
||||
ccol("data", 200, "''"),
|
||||
},
|
||||
[]tK{
|
||||
{"tid", "primary", "", false},
|
||||
{"title", "fulltext", "", false},
|
||||
{"content", "fulltext", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("replies", mysqlPre, mysqlCol,
|
||||
[]tC{
|
||||
{"rid", "int", 0, false, true, ""}, // TODO: Rename to replyID?
|
||||
{"tid", "int", 0, false, false, ""}, // TODO: Rename to topicID?
|
||||
text("content"),
|
||||
text("parsed_content"),
|
||||
createdAt(),
|
||||
{"createdBy", "int", 0, false, false, ""}, // TODO: Make this a foreign key
|
||||
{"lastEdit", "int", 0, false, false, "0"},
|
||||
{"lastEditBy", "int", 0, false, false, "0"},
|
||||
{"lastUpdated", "datetime", 0, false, false, ""},
|
||||
ccol("ip", 200, "''"),
|
||||
{"likeCount", "int", 0, false, false, "0"},
|
||||
{"attachCount", "int", 0, false, false, "0"},
|
||||
{"words", "int", 0, false, false, "1"}, // ? - replies has a default of 1 and topics has 0? why?
|
||||
ccol("actionType", 20, "''"),
|
||||
{"poll", "int", 0, false, false, "0"},
|
||||
},
|
||||
[]tK{
|
||||
{"rid", "primary", "", false},
|
||||
{"content", "fulltext", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("attachments", mysqlPre, mysqlCol,
|
||||
[]tC{
|
||||
{"attachID", "int", 0, false, true, ""},
|
||||
{"sectionID", "int", 0, false, false, "0"},
|
||||
ccol("sectionTable", 200, "forums"),
|
||||
{"originID", "int", 0, false, false, ""},
|
||||
ccol("originTable", 200, "replies"),
|
||||
{"uploadedBy", "int", 0, false, false, ""}, // TODO; Make this a foreign key
|
||||
ccol("path", 200, ""),
|
||||
ccol("extra", 200, ""),
|
||||
},
|
||||
[]tK{
|
||||
{"attachID", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("revisions", mysqlPre, mysqlCol,
|
||||
[]tC{
|
||||
{"reviseID", "int", 0, false, true, ""},
|
||||
text("content"),
|
||||
{"contentID", "int", 0, false, false, ""},
|
||||
ccol("contentType", 100, "replies"),
|
||||
createdAt(),
|
||||
// TODO: Add a createdBy column?
|
||||
},
|
||||
[]tK{
|
||||
{"reviseID", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("polls", mysqlPre, mysqlCol,
|
||||
[]tC{
|
||||
{"pollID", "int", 0, false, true, ""},
|
||||
{"parentID", "int", 0, false, false, "0"},
|
||||
ccol("parentTable", 100, "topics"), // topics, replies
|
||||
{"type", "int", 0, false, false, "0"},
|
||||
{"options", "json", 0, false, false, ""},
|
||||
{"votes", "int", 0, false, false, "0"},
|
||||
},
|
||||
[]tK{
|
||||
{"pollID", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("polls_options", "", "",
|
||||
[]tC{
|
||||
{"pollID", "int", 0, false, false, ""},
|
||||
{"option", "int", 0, false, false, "0"},
|
||||
{"votes", "int", 0, false, false, "0"},
|
||||
}, nil,
|
||||
)
|
||||
|
||||
createTable("polls_votes", mysqlPre, mysqlCol,
|
||||
[]tC{
|
||||
{"pollID", "int", 0, false, false, ""},
|
||||
{"uid", "int", 0, false, false, ""}, // TODO: Make this a foreign key
|
||||
{"option", "int", 0, false, false, "0"},
|
||||
createdAt("castAt"),
|
||||
ccol("ip", 200, "''"),
|
||||
}, nil,
|
||||
)
|
||||
|
||||
createTable("users_replies", mysqlPre, mysqlCol,
|
||||
[]tC{
|
||||
{"rid", "int", 0, false, true, ""},
|
||||
{"uid", "int", 0, false, false, ""}, // TODO: Make this a foreign key
|
||||
text("content"),
|
||||
text("parsed_content"),
|
||||
createdAt(),
|
||||
{"createdBy", "int", 0, false, false, ""}, // TODO: Make this a foreign key
|
||||
{"lastEdit", "int", 0, false, false, "0"},
|
||||
{"lastEditBy", "int", 0, false, false, "0"},
|
||||
ccol("ip", 200, "''"),
|
||||
},
|
||||
[]tK{
|
||||
{"rid", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("likes", "", "",
|
||||
[]tC{
|
||||
{"weight", "tinyint", 0, false, false, "1"},
|
||||
{"targetItem", "int", 0, false, false, ""},
|
||||
ccol("targetType", 50, "replies"),
|
||||
{"sentBy", "int", 0, false, false, ""}, // TODO: Make this a foreign key
|
||||
createdAt(),
|
||||
{"recalc", "tinyint", 0, false, false, "0"},
|
||||
}, nil,
|
||||
)
|
||||
|
||||
//columns("participants,createdBy,createdAt,lastReplyBy,lastReplyAt").Where("cid=?")
|
||||
createTable("conversations", "", "",
|
||||
[]tC{
|
||||
{"cid", "int", 0, false, true, ""},
|
||||
{"createdBy", "int", 0, false, false, ""}, // TODO: Make this a foreign key
|
||||
createdAt(),
|
||||
{"lastReplyAt", "datetime", 0, false, false, ""},
|
||||
{"lastReplyBy", "int", 0, false, false, ""},
|
||||
},
|
||||
[]tK{
|
||||
{"cid", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("conversations_posts", "", "",
|
||||
[]tC{
|
||||
{"pid", "int", 0, false, true, ""},
|
||||
{"cid", "int", 0, false, false, ""},
|
||||
{"createdBy", "int", 0, false, false, ""},
|
||||
ccol("body", 50, ""),
|
||||
ccol("post", 50, "''"),
|
||||
},
|
||||
[]tK{
|
||||
{"pid", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("conversations_participants", "", "",
|
||||
[]tC{
|
||||
{"uid", "int", 0, false, false, ""},
|
||||
{"cid", "int", 0, false, false, ""},
|
||||
}, nil,
|
||||
)
|
||||
|
||||
/*
|
||||
createTable("users_friends", "", "",
|
||||
[]tC{
|
||||
{"uid", "int", 0, false, false, ""},
|
||||
{"uid2", "int", 0, false, false, ""},
|
||||
}, nil,
|
||||
)
|
||||
createTable("users_friends_invites", "", "",
|
||||
[]tC{
|
||||
{"requester", "int", 0, false, false, ""},
|
||||
{"target", "int", 0, false, false, ""},
|
||||
}, nil,
|
||||
)
|
||||
*/
|
||||
|
||||
createTable("users_blocks", "", "",
|
||||
[]tC{
|
||||
{"blocker", "int", 0, false, false, ""},
|
||||
{"blockedUser", "int", 0, false, false, ""},
|
||||
}, nil,
|
||||
)
|
||||
|
||||
createTable("activity_stream_matches", "", "",
|
||||
[]tC{
|
||||
{"watcher", "int", 0, false, false, ""}, // TODO: Make this a foreign key
|
||||
{"asid", "int", 0, false, false, ""}, // TODO: Make this a foreign key
|
||||
},
|
||||
[]tK{
|
||||
{"asid,asid", "foreign", "activity_stream", true},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("activity_stream", "", "",
|
||||
[]tC{
|
||||
{"asid", "int", 0, false, true, ""},
|
||||
{"actor", "int", 0, false, false, ""}, /* the one doing the act */ // TODO: Make this a foreign key
|
||||
{"targetUser", "int", 0, false, false, ""}, /* the user who created the item the actor is acting on, some items like forums may lack a targetUser field */
|
||||
ccol("event", 50, ""), /* mention, like, reply (as in the act of replying to an item, not the reply item type, you can "reply" to a forum by making a topic in it), friend_invite */
|
||||
ccol("elementType", 50, ""), /* topic, post (calling it post here to differentiate it from the 'reply' event), forum, user */
|
||||
|
||||
// replacement for elementType
|
||||
tC{"elementTable", "int", 0, false, false, "0"},
|
||||
|
||||
{"elementID", "int", 0, false, false, ""}, /* the ID of the element being acted upon */
|
||||
createdAt(),
|
||||
ccol("extra", 200, "''"),
|
||||
},
|
||||
[]tK{
|
||||
{"asid", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("activity_subscriptions", "", "",
|
||||
[]tC{
|
||||
{"user", "int", 0, false, false, ""}, // TODO: Make this a foreign key
|
||||
{"targetID", "int", 0, false, false, ""}, /* the ID of the element being acted upon */
|
||||
ccol("targetType", 50, ""), /* topic, post (calling it post here to differentiate it from the 'reply' event), forum, user */
|
||||
{"level", "int", 0, false, false, "0"}, /* 0: Mentions (aka the global default for any post), 1: Replies To You, 2: All Replies*/
|
||||
}, nil,
|
||||
)
|
||||
|
||||
/* Due to MySQL's design, we have to drop the unique keys for table settings, plugins, and themes down from 200 to 180 or it will error */
|
||||
createTable("settings", "", "",
|
||||
[]tC{
|
||||
ccol("name", 180, ""),
|
||||
ccol("content", 250, ""),
|
||||
ccol("type", 50, ""),
|
||||
ccol("constraints", 200, "''"),
|
||||
},
|
||||
[]tK{
|
||||
{"name", "unique", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("word_filters", "", "",
|
||||
[]tC{
|
||||
{"wfid", "int", 0, false, true, ""},
|
||||
ccol("find", 200, ""),
|
||||
ccol("replacement", 200, ""),
|
||||
},
|
||||
[]tK{
|
||||
{"wfid", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("plugins", "", "",
|
||||
[]tC{
|
||||
ccol("uname", 180, ""),
|
||||
bcol("active", false),
|
||||
bcol("installed", false),
|
||||
},
|
||||
[]tK{
|
||||
{"uname", "unique", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("themes", "", "",
|
||||
[]tC{
|
||||
ccol("uname", 180, ""),
|
||||
bcol("default", false),
|
||||
//text("profileUserVars"),
|
||||
},
|
||||
[]tK{
|
||||
{"uname", "unique", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("widgets", "", "",
|
||||
[]tC{
|
||||
{"wid", "int", 0, false, true, ""},
|
||||
{"position", "int", 0, false, false, ""},
|
||||
ccol("side", 100, ""),
|
||||
ccol("type", 100, ""),
|
||||
bcol("active", false),
|
||||
ccol("location", 100, ""),
|
||||
text("data"),
|
||||
},
|
||||
[]tK{
|
||||
{"wid", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("menus", "", "",
|
||||
[]tC{
|
||||
{"mid", "int", 0, false, true, ""},
|
||||
},
|
||||
[]tK{
|
||||
{"mid", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("menu_items", "", "",
|
||||
[]tC{
|
||||
{"miid", "int", 0, false, true, ""},
|
||||
{"mid", "int", 0, false, false, ""},
|
||||
ccol("name", 200, "''"),
|
||||
ccol("htmlID", 200, "''"),
|
||||
ccol("cssClass", 200, "''"),
|
||||
ccol("position", 100, ""),
|
||||
ccol("path", 200, "''"),
|
||||
ccol("aria", 200, "''"),
|
||||
ccol("tooltip", 200, "''"),
|
||||
ccol("tmplName", 200, "''"),
|
||||
{"order", "int", 0, false, false, "0"},
|
||||
|
||||
bcol("guestOnly", false),
|
||||
bcol("memberOnly", false),
|
||||
bcol("staffOnly", false),
|
||||
bcol("adminOnly", false),
|
||||
},
|
||||
[]tK{
|
||||
{"miid", "primary", "", false},
|
||||
},
|
||||
)
|
||||
|
||||
createTable("pages", mysqlPre, mysqlCol,
|
||||
[]tC{
|
||||
{"pid", "int", 0, false, true, ""},
|
||||
	//ccol("path", 200, ""),
	ccol("name", 200, ""),
	ccol("title", 200, ""),
	text("body"),
	// TODO: Make this a table?
	text("allowedGroups"),
	{"menuID", "int", 0, false, false, "-1"}, // simple sidebar menu
},
[]tK{
	{"pid", "primary", "", false},
},
)

createTable("registration_logs", "", "",
	[]tC{
		{"rlid", "int", 0, false, true, ""},
		ccol("username", 100, ""),
		{"email", "varchar", 100, false, false, ""},
		ccol("failureReason", 100, ""),
		bcol("success", false), // Did this attempt succeed?
		ccol("ipaddress", 200, ""),
		createdAt("doneAt"),
	},
	[]tK{
		{"rlid", "primary", "", false},
	},
)

createTable("login_logs", "", "",
	[]tC{
		{"lid", "int", 0, false, true, ""},
		{"uid", "int", 0, false, false, ""},

		bcol("success", false), // Did this attempt succeed?
		ccol("ipaddress", 200, ""),
		createdAt("doneAt"),
	},
	[]tK{
		{"lid", "primary", "", false},
	},
)

createTable("moderation_logs", "", "",
	[]tC{
		ccol("action", 100, ""),
		{"elementID", "int", 0, false, false, ""},
		ccol("elementType", 100, ""),
		ccol("ipaddress", 200, ""),
		{"actorID", "int", 0, false, false, ""}, // TODO: Make this a foreign key
		{"doneAt", "datetime", 0, false, false, ""},
		text("extra"),
	}, nil,
)

createTable("administration_logs", "", "",
	[]tC{
		ccol("action", 100, ""),
		{"elementID", "int", 0, false, false, ""},
		ccol("elementType", 100, ""),
		ccol("ipaddress", 200, ""),
		{"actorID", "int", 0, false, false, ""}, // TODO: Make this a foreign key
		{"doneAt", "datetime", 0, false, false, ""},
		text("extra"),
	}, nil,
)

createTable("viewchunks", "", "",
	[]tC{
		{"count", "int", 0, false, false, "0"},
		{"avg", "int", 0, false, false, "0"},
		{"createdAt", "datetime", 0, false, false, ""},
		ccol("route", 200, ""), // TODO: set a default empty here
	}, nil,
)

createTable("viewchunks_agents", "", "",
	[]tC{
		{"count", "int", 0, false, false, "0"},
		{"createdAt", "datetime", 0, false, false, ""},
		ccol("browser", 200, ""), // googlebot, firefox, opera, etc.
		//ccol("version",0,""), // the version of the browser or bot
	}, nil,
)

createTable("viewchunks_systems", "", "",
	[]tC{
		{"count", "int", 0, false, false, "0"},
		{"createdAt", "datetime", 0, false, false, ""},
		ccol("system", 200, ""), // windows, android, unknown, etc.
	}, nil,
)

createTable("viewchunks_langs", "", "",
	[]tC{
		{"count", "int", 0, false, false, "0"},
		{"createdAt", "datetime", 0, false, false, ""},
		ccol("lang", 200, ""), // en, ru, etc.
	}, nil,
)

createTable("viewchunks_referrers", "", "",
	[]tC{
		{"count", "int", 0, false, false, "0"},
		{"createdAt", "datetime", 0, false, false, ""},
		ccol("domain", 200, ""),
	}, nil,
)

createTable("viewchunks_forums", "", "",
	[]tC{
		{"count", "int", 0, false, false, "0"},
		{"createdAt", "datetime", 0, false, false, ""},
		{"forum", "int", 0, false, false, ""},
	}, nil,
)

createTable("topicchunks", "", "",
	[]tC{
		{"count", "int", 0, false, false, "0"},
		{"createdAt", "datetime", 0, false, false, ""},
		// TODO: Add a column for the parent forum?
	}, nil,
)

createTable("postchunks", "", "",
	[]tC{
		{"count", "int", 0, false, false, "0"},
		{"createdAt", "datetime", 0, false, false, ""},
		// TODO: Add a column for the parent topic / profile?
	}, nil,
)

createTable("memchunks", "", "",
	[]tC{
		{"count", "int", 0, false, false, "0"},
		{"stack", "int", 0, false, false, "0"},
		{"heap", "int", 0, false, false, "0"},
		{"createdAt", "datetime", 0, false, false, ""},
	}, nil,
)

createTable("perfchunks", "", "",
	[]tC{
		{"low", "int", 0, false, false, "0"},
		{"high", "int", 0, false, false, "0"},
		{"avg", "int", 0, false, false, "0"},
		{"createdAt", "datetime", 0, false, false, ""},
	}, nil,
)

createTable("sync", "", "",
	[]tC{
		{"last_update", "datetime", 0, false, false, ""},
	}, nil,
)

createTable("updates", "", "",
	[]tC{
		{"dbVersion", "int", 0, false, false, "0"},
	}, nil,
)

createTable("meta", "", "",
	[]tC{
		ccol("name", 200, ""),
		ccol("value", 200, ""),
	}, nil,
)

/*createTable("tables", "", "",
	[]tC{
		{"id", "int", 0, false, true, ""},
		ccol("name", 200, ""),
	},
	[]tK{
		{"id", "primary", "", false},
		{"name", "unique", "", false},
	},
)*/

return err
}
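For readers unfamiliar with the schema DSL above: judging from the calls, the positional `tC` literals appear to be (column name, type, size, nullable, auto-increment, default), with `ccol`, `bcol`, `text` and `createdAt` acting as shorthands for common column kinds and `tK` describing keys. That reading is an inference from the calls above rather than something the source states; a purely hypothetical table written out in the same style would look roughly like this:

// Hypothetical example only; the tC/tK field order is inferred from the calls above:
// {name, type, size, nullable?, autoIncrement?, default}.
createTable("example_notes", "", "",
	[]tC{
		{"nid", "int", 0, false, true, ""},        // auto-increment ID
		ccol("title", 200, ""),                    // varchar(200), default ""
		text("body"),                              // text column
		{"createdBy", "int", 0, false, false, ""}, // plain int column
		createdAt("createdAt"),                    // datetime helper
	},
	[]tK{
		{"nid", "primary", "", false}, // primary key on nid
	},
)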
@ -1,113 +0,0 @@
package common

import (
	"database/sql"

	qgen "git.tuxpa.in/a/gosora/query_gen"
)

var Activity ActivityStream

type ActivityStream interface {
	Add(a Alert) (int, error)
	Get(id int) (Alert, error)
	Delete(id int) error
	DeleteByParams(event string, targetID int, targetType string) error
	DeleteByParamsExtra(event string, targetID int, targetType, extra string) error
	AidsByParams(event string, elementID int, elementType string) (aids []int, err error)
	AidsByParamsExtra(event string, elementID int, elementType, extra string) (aids []int, err error)
	Count() (count int)
}

type DefaultActivityStream struct {
	add                 *sql.Stmt
	get                 *sql.Stmt
	delete              *sql.Stmt
	deleteByParams      *sql.Stmt
	deleteByParamsExtra *sql.Stmt
	aidsByParams        *sql.Stmt
	aidsByParamsExtra   *sql.Stmt
	count               *sql.Stmt
}

func NewDefaultActivityStream(acc *qgen.Accumulator) (*DefaultActivityStream, error) {
	as := "activity_stream"
	cols := "actor,targetUser,event,elementType,elementID,createdAt,extra"
	return &DefaultActivityStream{
		add:                 acc.Insert(as).Columns(cols).Fields("?,?,?,?,?,UTC_TIMESTAMP(),?").Prepare(),
		get:                 acc.Select(as).Columns(cols).Where("asid=?").Prepare(),
		delete:              acc.Delete(as).Where("asid=?").Prepare(),
		deleteByParams:      acc.Delete(as).Where("event=? AND elementID=? AND elementType=?").Prepare(),
		deleteByParamsExtra: acc.Delete(as).Where("event=? AND elementID=? AND elementType=? AND extra=?").Prepare(),
		aidsByParams:        acc.Select(as).Columns("asid").Where("event=? AND elementID=? AND elementType=?").Prepare(),
		aidsByParamsExtra:   acc.Select(as).Columns("asid").Where("event=? AND elementID=? AND elementType=? AND extra=?").Prepare(),
		count:               acc.Count(as).Prepare(),
	}, acc.FirstError()
}

func (s *DefaultActivityStream) Add(a Alert) (int, error) {
	res, err := s.add.Exec(a.ActorID, a.TargetUserID, a.Event, a.ElementType, a.ElementID, a.Extra)
	if err != nil {
		return 0, err
	}
	lastID, err := res.LastInsertId()
	return int(lastID), err
}

func (s *DefaultActivityStream) Get(id int) (Alert, error) {
	a := Alert{ASID: id}
	err := s.get.QueryRow(id).Scan(&a.ActorID, &a.TargetUserID, &a.Event, &a.ElementType, &a.ElementID, &a.CreatedAt, &a.Extra)
	return a, err
}

func (s *DefaultActivityStream) Delete(id int) error {
	_, err := s.delete.Exec(id)
	return err
}

func (s *DefaultActivityStream) DeleteByParams(event string, elementID int, elementType string) error {
	_, err := s.deleteByParams.Exec(event, elementID, elementType)
	return err
}

func (s *DefaultActivityStream) DeleteByParamsExtra(event string, elementID int, elementType, extra string) error {
	_, err := s.deleteByParamsExtra.Exec(event, elementID, elementType, extra)
	return err
}

func (s *DefaultActivityStream) AidsByParams(event string, elementID int, elementType string) (aids []int, err error) {
	rows, err := s.aidsByParams.Query(event, elementID, elementType)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	for rows.Next() {
		var aid int
		if err := rows.Scan(&aid); err != nil {
			return nil, err
		}
		aids = append(aids, aid)
	}
	return aids, rows.Err()
}

func (s *DefaultActivityStream) AidsByParamsExtra(event string, elementID int, elementType, extra string) (aids []int, e error) {
	rows, e := s.aidsByParamsExtra.Query(event, elementID, elementType, extra)
	if e != nil {
		return nil, e
	}
	defer rows.Close()
	for rows.Next() {
		var aid int
		if e := rows.Scan(&aid); e != nil {
			return nil, e
		}
		aids = append(aids, aid)
	}
	return aids, rows.Err()
}

// Count returns the total number of activity stream items
func (s *DefaultActivityStream) Count() (count int) {
	return Count(s.count)
}
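A minimal usage sketch for the store above, assuming it runs inside the `common` package once `NewDefaultActivityStream` has been wired up; the Alert field values are invented:

// Sketch only: record an activity entry, read it back, then remove it.
func exampleActivityStreamUsage(acc *qgen.Accumulator) error {
	as, err := NewDefaultActivityStream(acc)
	if err != nil {
		return err
	}
	// Add returns the asid of the inserted row.
	asid, err := as.Add(Alert{ActorID: 1, TargetUserID: 2, Event: "reply", ElementType: "topic", ElementID: 3})
	if err != nil {
		return err
	}
	a, err := as.Get(asid)
	if err != nil {
		return err
	}
	_ = a // a.Event == "reply", a.ElementID == 3, etc.
	return as.Delete(asid)
}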
@ -1,54 +0,0 @@
package common

import (
	"database/sql"

	qgen "git.tuxpa.in/a/gosora/query_gen"
)

var ActivityMatches ActivityStreamMatches

type ActivityStreamMatches interface {
	Add(watcher, asid int) error
	Delete(watcher, asid int) error
	DeleteAndCountChanged(watcher, asid int) (int, error)
	CountAsid(asid int) int
}

type DefaultActivityStreamMatches struct {
	add       *sql.Stmt
	delete    *sql.Stmt
	countAsid *sql.Stmt
}

func NewDefaultActivityStreamMatches(acc *qgen.Accumulator) (*DefaultActivityStreamMatches, error) {
	asm := "activity_stream_matches"
	return &DefaultActivityStreamMatches{
		add:       acc.Insert(asm).Columns("watcher,asid").Fields("?,?").Prepare(),
		delete:    acc.Delete(asm).Where("watcher=? AND asid=?").Prepare(),
		countAsid: acc.Count(asm).Where("asid=?").Prepare(),
	}, acc.FirstError()
}

func (s *DefaultActivityStreamMatches) Add(watcher, asid int) error {
	_, e := s.add.Exec(watcher, asid)
	return e
}

func (s *DefaultActivityStreamMatches) Delete(watcher, asid int) error {
	_, e := s.delete.Exec(watcher, asid)
	return e
}

func (s *DefaultActivityStreamMatches) DeleteAndCountChanged(watcher, asid int) (int, error) {
	res, e := s.delete.Exec(watcher, asid)
	if e != nil {
		return 0, e
	}
	c64, e := res.RowsAffected()
	return int(c64), e
}

func (s *DefaultActivityStreamMatches) CountAsid(asid int) int {
	return Countf(s.countAsid, asid)
}
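A companion sketch for the matches store, again assuming the `common` package context; DeleteAndCountChanged reports how many rows the delete touched via RowsAffected:

// Sketch only: subscribe a watcher to an activity entry, then unsubscribe them.
func exampleMatchesUsage(asm ActivityStreamMatches, watcherUID, asid int) error {
	if err := asm.Add(watcherUID, asid); err != nil {
		return err
	}
	changed, err := asm.DeleteAndCountChanged(watcherUID, asid)
	if err != nil {
		return err
	}
	_ = changed // 1 if the watcher row existed, 0 otherwise
	return nil
}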
547 common/alerts.go
@ -1,7 +1,7 @@
|
||||
/*
|
||||
*
|
||||
* Gosora Alerts System
|
||||
* Copyright Azareal 2017 - 2020
|
||||
* Copyright Azareal 2017 - 2018
|
||||
*
|
||||
*/
|
||||
package common
|
||||
@ -11,30 +11,16 @@ import (
|
||||
"errors"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
//"fmt"
|
||||
|
||||
"git.tuxpa.in/a/gosora/common/phrases"
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"../query_gen/lib"
|
||||
)
|
||||
|
||||
type Alert struct {
|
||||
ASID int
|
||||
ActorID int
|
||||
TargetUserID int
|
||||
Event string
|
||||
ElementType string
|
||||
ElementID int
|
||||
CreatedAt time.Time
|
||||
Extra string
|
||||
|
||||
Actor *User
|
||||
}
|
||||
|
||||
type AlertStmts struct {
|
||||
notifyWatchers *sql.Stmt
|
||||
getWatchers *sql.Stmt
|
||||
addActivity *sql.Stmt
|
||||
notifyWatchers *sql.Stmt
|
||||
notifyOne *sql.Stmt
|
||||
getWatchers *sql.Stmt
|
||||
getActivityEntry *sql.Stmt
|
||||
}
|
||||
|
||||
var alertStmts AlertStmts
|
||||
@ -42,344 +28,215 @@ var alertStmts AlertStmts
|
||||
// TODO: Move these statements into some sort of activity abstraction
|
||||
// TODO: Rewrite the alerts logic
|
||||
func init() {
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
alertStmts = AlertStmts{
|
||||
notifyWatchers: acc.SimpleInsertInnerJoin(
|
||||
qgen.DBInsert{"activity_stream_matches", "watcher,asid", ""},
|
||||
qgen.DBJoin{"activity_stream", "activity_subscriptions", "activity_subscriptions.user, activity_stream.asid", "activity_subscriptions.targetType = activity_stream.elementType AND activity_subscriptions.targetID = activity_stream.elementID AND activity_subscriptions.user != activity_stream.actor", "asid=?", "", ""},
|
||||
),
|
||||
getWatchers: acc.SimpleInnerJoin("activity_stream", "activity_subscriptions", "activity_subscriptions.user", "activity_subscriptions.targetType = activity_stream.elementType AND activity_subscriptions.targetID = activity_stream.elementID AND activity_subscriptions.user != activity_stream.actor", "asid=?", "", ""),
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
alertStmts = AlertStmts{
|
||||
addActivity: acc.Insert("activity_stream").Columns("actor, targetUser, event, elementType, elementID").Fields("?,?,?,?,?").Prepare(),
|
||||
notifyWatchers: acc.SimpleInsertInnerJoin(
|
||||
qgen.DBInsert{"activity_stream_matches", "watcher, asid", ""},
|
||||
qgen.DBJoin{"activity_stream", "activity_subscriptions", "activity_subscriptions.user, activity_stream.asid", "activity_subscriptions.targetType = activity_stream.elementType AND activity_subscriptions.targetID = activity_stream.elementID AND activity_subscriptions.user != activity_stream.actor", "asid = ?", "", ""},
|
||||
),
|
||||
notifyOne: acc.Insert("activity_stream_matches").Columns("watcher, asid").Fields("?,?").Prepare(),
|
||||
getWatchers: acc.SimpleInnerJoin("activity_stream", "activity_subscriptions", "activity_subscriptions.user", "activity_subscriptions.targetType = activity_stream.elementType AND activity_subscriptions.targetID = activity_stream.elementID AND activity_subscriptions.user != activity_stream.actor", "asid = ?", "", ""),
|
||||
getActivityEntry: acc.Select("activity_stream").Columns("actor, targetUser, event, elementType, elementID").Where("asid = ?").Prepare(),
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
}
|
||||
|
||||
const AlertsGrowHint = len(`{"msgs":[],"count":,"tc":}`) + 1 + 10
|
||||
// These notes are for me, don't worry about it too much ^_^
|
||||
/*
|
||||
"You received a friend invite from {user}"
|
||||
"{x}{mentioned you on}{user}{'s profile}"
|
||||
"{x}{mentioned you in}{topic}"
|
||||
"{x}{likes}{you}"
|
||||
"{x}{liked}{your topic}{topic}"
|
||||
"{x}{liked}{your post on}{user}{'s profile}" todo
|
||||
"{x}{liked}{your post in}{topic}"
|
||||
"{x}{replied to}{your post in}{topic}" todo
|
||||
"{x}{replied to}{topic}"
|
||||
"{x}{replied to}{your topic}{topic}"
|
||||
"{x}{created a new topic}{topic}"
|
||||
*/
|
||||
|
||||
// TODO: See if we can json.Marshal instead?
|
||||
func escapeTextInJson(in string) string {
|
||||
in = strings.Replace(in, "\"", "\\\"", -1)
|
||||
return strings.Replace(in, "/", "\\/", -1)
|
||||
func BuildAlert(asid int, event string, elementType string, actorID int, targetUserID int, elementID int, user User /* The current user */) (string, error) {
|
||||
var targetUser *User
|
||||
|
||||
actor, err := Users.Get(actorID)
|
||||
if err != nil {
|
||||
return "", errors.New("Unable to find the actor")
|
||||
}
|
||||
|
||||
/*if elementType != "forum" {
|
||||
targetUser, err = users.Get(targetUser_id)
|
||||
if err != nil {
|
||||
LocalErrorJS("Unable to find the target user",w,r)
|
||||
return
|
||||
}
|
||||
}*/
|
||||
|
||||
if event == "friend_invite" {
|
||||
return `{"msg":"You received a friend invite from {0}","sub":["` + actor.Name + `"],"path":"` + actor.Link + `","avatar":"` + strings.Replace(actor.Avatar, "/", "\\/", -1) + `","asid":"` + strconv.Itoa(asid) + `"}`, nil
|
||||
}
|
||||
|
||||
var act, postAct, url, area string
|
||||
var startFrag, endFrag string
|
||||
switch elementType {
|
||||
case "forum":
|
||||
if event == "reply" {
|
||||
act = "created a new topic"
|
||||
topic, err := Topics.Get(elementID)
|
||||
if err != nil {
|
||||
DebugLogf("Unable to find linked topic %d", elementID)
|
||||
return "", errors.New("Unable to find the linked topic")
|
||||
}
|
||||
url = topic.Link
|
||||
area = topic.Title
|
||||
// Store the forum ID in the targetUser column instead of making a new one? o.O
|
||||
// Add an additional column for extra information later on when we add the ability to link directly to posts. We don't need the forum data for now...
|
||||
} else {
|
||||
act = "did something in a forum"
|
||||
}
|
||||
case "topic":
|
||||
topic, err := Topics.Get(elementID)
|
||||
if err != nil {
|
||||
DebugLogf("Unable to find linked topic %d", elementID)
|
||||
return "", errors.New("Unable to find the linked topic")
|
||||
}
|
||||
url = topic.Link
|
||||
area = topic.Title
|
||||
|
||||
if targetUserID == user.ID {
|
||||
postAct = " your topic"
|
||||
}
|
||||
case "user":
|
||||
targetUser, err = Users.Get(elementID)
|
||||
if err != nil {
|
||||
DebugLogf("Unable to find target user %d", elementID)
|
||||
return "", errors.New("Unable to find the target user")
|
||||
}
|
||||
area = targetUser.Name
|
||||
endFrag = "'s profile"
|
||||
url = targetUser.Link
|
||||
case "post":
|
||||
topic, err := TopicByReplyID(elementID)
|
||||
if err != nil {
|
||||
return "", errors.New("Unable to find the linked reply or parent topic")
|
||||
}
|
||||
url = topic.Link
|
||||
area = topic.Title
|
||||
if targetUserID == user.ID {
|
||||
postAct = " your post in"
|
||||
}
|
||||
default:
|
||||
return "", errors.New("Invalid elementType")
|
||||
}
|
||||
|
||||
switch event {
|
||||
case "like":
|
||||
if elementType == "user" {
|
||||
act = "likes"
|
||||
endFrag = ""
|
||||
if targetUser.ID == user.ID {
|
||||
area = "you"
|
||||
}
|
||||
} else {
|
||||
act = "liked"
|
||||
}
|
||||
case "mention":
|
||||
if elementType == "user" {
|
||||
act = "mentioned you on"
|
||||
} else {
|
||||
act = "mentioned you in"
|
||||
postAct = ""
|
||||
}
|
||||
case "reply":
|
||||
act = "replied to"
|
||||
}
|
||||
|
||||
return `{"msg":"{0} ` + startFrag + act + postAct + ` {1}` + endFrag + `","sub":["` + actor.Name + `","` + area + `"],"path":"` + url + `","avatar":"` + actor.Avatar + `","asid":"` + strconv.Itoa(asid) + `"}`, nil
|
||||
}
|
||||
|
||||
func BuildAlert(a Alert, user User /* The current user */) (out string, err error) {
|
||||
var targetUser *User
|
||||
if a.Actor == nil {
|
||||
a.Actor, err = Users.Get(a.ActorID)
|
||||
if err != nil {
|
||||
return "", errors.New(phrases.GetErrorPhrase("alerts_no_actor"))
|
||||
}
|
||||
}
|
||||
|
||||
/*if a.ElementType != "forum" {
|
||||
targetUser, err = users.Get(a.TargetUserID)
|
||||
if err != nil {
|
||||
LocalErrorJS("Unable to find the target user",w,r)
|
||||
return
|
||||
}
|
||||
}*/
|
||||
if a.Event == "friend_invite" {
|
||||
return buildAlertString(".new_friend_invite", []string{a.Actor.Name}, a.Actor.Link, a.Actor.Avatar, a.ASID), nil
|
||||
}
|
||||
|
||||
// Not that many events for us to handle in a forum
|
||||
if a.ElementType == "forum" {
|
||||
if a.Event == "reply" {
|
||||
topic, err := Topics.Get(a.ElementID)
|
||||
if err != nil {
|
||||
DebugLogf("Unable to find linked topic %d", a.ElementID)
|
||||
return "", errors.New(phrases.GetErrorPhrase("alerts_no_linked_topic"))
|
||||
}
|
||||
// Store the forum ID in the targetUser column instead of making a new one? o.O
|
||||
// Add an additional column for extra information later on when we add the ability to link directly to posts. We don't need the forum data for now...
|
||||
return buildAlertString(".forum_new_topic", []string{a.Actor.Name, topic.Title}, topic.Link, a.Actor.Avatar, a.ASID), nil
|
||||
}
|
||||
return buildAlertString(".forum_unknown_action", []string{a.Actor.Name}, "", a.Actor.Avatar, a.ASID), nil
|
||||
}
|
||||
|
||||
var url, area, phraseName string
|
||||
own := false
|
||||
// TODO: Avoid loading a bit of data twice
|
||||
switch a.ElementType {
|
||||
case "convo":
|
||||
convo, err := Convos.Get(a.ElementID)
|
||||
if err != nil {
|
||||
DebugLogf("Unable to find linked convo %d", a.ElementID)
|
||||
return "", errors.New(phrases.GetErrorPhrase("alerts_no_linked_convo"))
|
||||
}
|
||||
url = convo.Link
|
||||
case "topic":
|
||||
topic, err := Topics.Get(a.ElementID)
|
||||
if err != nil {
|
||||
DebugLogf("Unable to find linked topic %d", a.ElementID)
|
||||
return "", errors.New(phrases.GetErrorPhrase("alerts_no_linked_topic"))
|
||||
}
|
||||
url = topic.Link
|
||||
area = topic.Title
|
||||
own = a.TargetUserID == user.ID
|
||||
case "user":
|
||||
targetUser, err = Users.Get(a.ElementID)
|
||||
if err != nil {
|
||||
DebugLogf("Unable to find target user %d", a.ElementID)
|
||||
return "", errors.New(phrases.GetErrorPhrase("alerts_no_target_user"))
|
||||
}
|
||||
area = targetUser.Name
|
||||
url = targetUser.Link
|
||||
own = a.TargetUserID == user.ID
|
||||
case "post":
|
||||
topic, err := TopicByReplyID(a.ElementID)
|
||||
if err != nil {
|
||||
DebugLogf("Unable to find linked topic by reply ID %d", a.ElementID)
|
||||
return "", errors.New(phrases.GetErrorPhrase("alerts_no_linked_topic_by_reply"))
|
||||
}
|
||||
url = topic.Link
|
||||
area = topic.Title
|
||||
own = a.TargetUserID == user.ID
|
||||
default:
|
||||
return "", errors.New(phrases.GetErrorPhrase("alerts_invalid_elementtype"))
|
||||
}
|
||||
|
||||
badEv := false
|
||||
switch a.Event {
|
||||
case "create", "like", "mention", "reply":
|
||||
// skip
|
||||
default:
|
||||
badEv = true
|
||||
}
|
||||
|
||||
if own && !badEv {
|
||||
phraseName = "." + a.ElementType + "_own_" + a.Event
|
||||
} else if !badEv {
|
||||
phraseName = "." + a.ElementType + "_" + a.Event
|
||||
} else if own {
|
||||
phraseName = "." + a.ElementType + "_own"
|
||||
} else {
|
||||
phraseName = "." + a.ElementType
|
||||
}
|
||||
|
||||
return buildAlertString(phraseName, []string{a.Actor.Name, area}, url, a.Actor.Avatar, a.ASID), nil
|
||||
func AddActivityAndNotifyAll(actor int, targetUser int, event string, elementType string, elementID int) error {
|
||||
res, err := alertStmts.addActivity.Exec(actor, targetUser, event, elementType, elementID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
lastID, err := res.LastInsertId()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return NotifyWatchers(lastID)
|
||||
}
|
||||
|
||||
func buildAlertString(msg string, sub []string, path, avatar string, asid int) string {
|
||||
var sb strings.Builder
|
||||
buildAlertSb(&sb, msg, sub, path, avatar, asid)
|
||||
return sb.String()
|
||||
func AddActivityAndNotifyTarget(actor int, targetUser int, event string, elementType string, elementID int) error {
|
||||
res, err := alertStmts.addActivity.Exec(actor, targetUser, event, elementType, elementID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
lastID, err := res.LastInsertId()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = NotifyOne(targetUser, lastID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Live alerts, if the target is online and WebSockets is enabled
|
||||
_ = WsHub.pushAlert(targetUser, int(lastID), event, elementType, actor, targetUser, elementID)
|
||||
return nil
|
||||
}
|
||||
|
||||
const AlertsGrowHint2 = len(`{"msg":"","sub":[],"path":"","img":"","id":}`) + 5 + 3 + 1 + 1 + 1
|
||||
|
||||
// TODO: Use a string builder?
|
||||
func buildAlertSb(sb *strings.Builder, msg string, sub []string, path, avatar string, asid int) {
|
||||
sb.WriteString(`{"msg":"`)
|
||||
sb.WriteString(escapeTextInJson(msg))
|
||||
sb.WriteString(`","sub":[`)
|
||||
for i, it := range sub {
|
||||
if i != 0 {
|
||||
sb.WriteString(",\"")
|
||||
} else {
|
||||
sb.WriteString("\"")
|
||||
}
|
||||
sb.WriteString(escapeTextInJson(it))
|
||||
sb.WriteString("\"")
|
||||
}
|
||||
sb.WriteString(`],"path":"`)
|
||||
sb.WriteString(escapeTextInJson(path))
|
||||
sb.WriteString(`","img":"`)
|
||||
sb.WriteString(escapeTextInJson(avatar))
|
||||
sb.WriteString(`","id":`)
|
||||
sb.WriteString(strconv.Itoa(asid))
|
||||
sb.WriteRune('}')
|
||||
func NotifyOne(watcher int, asid int64) error {
|
||||
_, err := alertStmts.notifyOne.Exec(watcher, asid)
|
||||
return err
|
||||
}
|
||||
|
||||
func BuildAlertSb(sb *strings.Builder, a *Alert, u *User /* The current user */) (err error) {
|
||||
var targetUser *User
|
||||
if a.Actor == nil {
|
||||
a.Actor, err = Users.Get(a.ActorID)
|
||||
if err != nil {
|
||||
return errors.New(phrases.GetErrorPhrase("alerts_no_actor"))
|
||||
}
|
||||
}
|
||||
func NotifyWatchers(asid int64) error {
|
||||
_, err := alertStmts.notifyWatchers.Exec(asid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
/*if a.ElementType != "forum" {
|
||||
targetUser, err = users.Get(a.TargetUserID)
|
||||
if err != nil {
|
||||
LocalErrorJS("Unable to find the target user",w,r)
|
||||
return
|
||||
}
|
||||
}*/
|
||||
if a.Event == "friend_invite" {
|
||||
buildAlertSb(sb, ".new_friend_invite", []string{a.Actor.Name}, a.Actor.Link, a.Actor.Avatar, a.ASID)
|
||||
return nil
|
||||
}
|
||||
// Alert the subscribers about this without blocking us from doing something else
|
||||
if EnableWebsockets {
|
||||
go notifyWatchers(asid)
|
||||
}
|
||||
|
||||
// Not that many events for us to handle in a forum
|
||||
if a.ElementType == "forum" {
|
||||
if a.Event == "reply" {
|
||||
topic, err := Topics.Get(a.ElementID)
|
||||
if err != nil {
|
||||
DebugLogf("Unable to find linked topic %d", a.ElementID)
|
||||
return errors.New(phrases.GetErrorPhrase("alerts_no_linked_topic"))
|
||||
}
|
||||
// Store the forum ID in the targetUser column instead of making a new one? o.O
|
||||
// Add an additional column for extra information later on when we add the ability to link directly to posts. We don't need the forum data for now...
|
||||
buildAlertSb(sb, ".forum_new_topic", []string{a.Actor.Name, topic.Title}, topic.Link, a.Actor.Avatar, a.ASID)
|
||||
return nil
|
||||
}
|
||||
buildAlertSb(sb, ".forum_unknown_action", []string{a.Actor.Name}, "", a.Actor.Avatar, a.ASID)
|
||||
return nil
|
||||
}
|
||||
|
||||
var url, area string
|
||||
own := false
|
||||
// TODO: Avoid loading a bit of data twice
|
||||
switch a.ElementType {
|
||||
case "convo":
|
||||
convo, err := Convos.Get(a.ElementID)
|
||||
if err != nil {
|
||||
DebugLogf("Unable to find linked convo %d", a.ElementID)
|
||||
return errors.New(phrases.GetErrorPhrase("alerts_no_linked_convo"))
|
||||
}
|
||||
url = convo.Link
|
||||
case "topic":
|
||||
topic, err := Topics.Get(a.ElementID)
|
||||
if err != nil {
|
||||
DebugLogf("Unable to find linked topic %d", a.ElementID)
|
||||
return errors.New(phrases.GetErrorPhrase("alerts_no_linked_topic"))
|
||||
}
|
||||
url = topic.Link
|
||||
area = topic.Title
|
||||
own = a.TargetUserID == u.ID
|
||||
case "user":
|
||||
targetUser, err = Users.Get(a.ElementID)
|
||||
if err != nil {
|
||||
DebugLogf("Unable to find target user %d", a.ElementID)
|
||||
return errors.New(phrases.GetErrorPhrase("alerts_no_target_user"))
|
||||
}
|
||||
area = targetUser.Name
|
||||
url = targetUser.Link
|
||||
own = a.TargetUserID == u.ID
|
||||
case "post":
|
||||
t, err := TopicByReplyID(a.ElementID)
|
||||
if err != nil {
|
||||
DebugLogf("Unable to find linked topic by reply ID %d", a.ElementID)
|
||||
return errors.New(phrases.GetErrorPhrase("alerts_no_linked_topic_by_reply"))
|
||||
}
|
||||
url = t.Link
|
||||
area = t.Title
|
||||
own = a.TargetUserID == u.ID
|
||||
default:
|
||||
return errors.New(phrases.GetErrorPhrase("alerts_invalid_elementtype"))
|
||||
}
|
||||
|
||||
sb.WriteString(`{"msg":".`)
|
||||
sb.WriteString(a.ElementType)
|
||||
if own {
|
||||
sb.WriteString("_own_")
|
||||
} else {
|
||||
sb.WriteRune('_')
|
||||
}
|
||||
switch a.Event {
|
||||
case "create", "like", "mention", "reply":
|
||||
sb.WriteString(a.Event)
|
||||
}
|
||||
|
||||
sb.WriteString(`","sub":["`)
|
||||
sb.WriteString(escapeTextInJson(a.Actor.Name))
|
||||
sb.WriteString("\",\"")
|
||||
sb.WriteString(escapeTextInJson(area))
|
||||
sb.WriteString(`"],"path":"`)
|
||||
sb.WriteString(escapeTextInJson(url))
|
||||
sb.WriteString(`","img":"`)
|
||||
sb.WriteString(escapeTextInJson(a.Actor.Avatar))
|
||||
sb.WriteString(`","id":`)
|
||||
sb.WriteString(strconv.Itoa(a.ASID))
|
||||
sb.WriteRune('}')
|
||||
|
||||
return nil
|
||||
return nil
|
||||
}
|
||||
|
||||
//const AlertsGrowHint3 = len(`{"msg":"._","sub":["",""],"path":"","img":"","id":}`) + 3 + 2 + 2 + 2 + 2 + 1
|
||||
func notifyWatchers(asid int64) {
|
||||
rows, err := alertStmts.getWatchers.Query(asid)
|
||||
if err != nil && err != ErrNoRows {
|
||||
LogError(err)
|
||||
return
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
// TODO: Create a notifier structure?
|
||||
func AddActivityAndNotifyAll(a Alert) error {
|
||||
id, err := Activity.Add(a)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return NotifyWatchers(id)
|
||||
}
|
||||
|
||||
// TODO: Create a notifier structure?
|
||||
func AddActivityAndNotifyTarget(a Alert) error {
|
||||
id, err := Activity.Add(a)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = ActivityMatches.Add(a.TargetUserID, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
a.ASID = id
|
||||
|
||||
// Live alerts, if the target is online and WebSockets is enabled
|
||||
if EnableWebsockets {
|
||||
go func() {
|
||||
defer EatPanics()
|
||||
_ = WsHub.pushAlert(a.TargetUserID, a)
|
||||
//fmt.Println("err:",err)
|
||||
}()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// TODO: Create a notifier structure?
|
||||
func NotifyWatchers(asid int) error {
|
||||
_, err := alertStmts.notifyWatchers.Exec(asid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Alert the subscribers about this without blocking us from doing something else
|
||||
if EnableWebsockets {
|
||||
go func() {
|
||||
defer EatPanics()
|
||||
notifyWatchers(asid)
|
||||
}()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func notifyWatchers(asid int) {
|
||||
rows, e := alertStmts.getWatchers.Query(asid)
|
||||
if e != nil && e != ErrNoRows {
|
||||
LogError(e)
|
||||
return
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var uid int
|
||||
var uids []int
|
||||
for rows.Next() {
|
||||
if e := rows.Scan(&uid); e != nil {
|
||||
LogError(e)
|
||||
return
|
||||
}
|
||||
uids = append(uids, uid)
|
||||
}
|
||||
if e = rows.Err(); e != nil {
|
||||
LogError(e)
|
||||
return
|
||||
}
|
||||
|
||||
alert, e := Activity.Get(asid)
|
||||
if e != nil && e != ErrNoRows {
|
||||
LogError(e)
|
||||
return
|
||||
}
|
||||
_ = WsHub.pushAlerts(uids, alert)
|
||||
}
|
||||
|
||||
func DismissAlert(uid, aid int) {
|
||||
_ = WsHub.PushMessage(uid, `{"event":"dismiss-alert","id":`+strconv.Itoa(aid)+`}`)
|
||||
var uid int
|
||||
var uids []int
|
||||
for rows.Next() {
|
||||
err := rows.Scan(&uid)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
return
|
||||
}
|
||||
uids = append(uids, uid)
|
||||
}
|
||||
err = rows.Err()
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
return
|
||||
}
|
||||
|
||||
var actorID, targetUserID, elementID int
|
||||
var event, elementType string
|
||||
err = alertStmts.getActivityEntry.QueryRow(asid).Scan(&actorID, &targetUserID, &event, &elementType, &elementID)
|
||||
if err != nil && err != ErrNoRows {
|
||||
LogError(err)
|
||||
return
|
||||
}
|
||||
|
||||
_ = WsHub.pushAlerts(uids, int(asid), event, elementType, actorID, targetUserID, elementID)
|
||||
}
|
||||
|
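The alert strings assembled above are hand-rolled JSON rather than json.Marshal output. Based on the fields written by buildAlertSb (msg, sub, path, img, id), a payload pushed to the client presumably looks something like the constant below; the concrete values are invented for illustration:

// Illustrative only: approximate shape of an alert payload sent over WebSockets.
// Field names come from buildAlertSb above; the values here are made up.
const exampleAlertPayload = `{"msg":".topic_reply","sub":["Alice","Some Topic"],"path":"/topic/some-topic.3","img":"/uploads/avatar_1.png","id":42}`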
@ -1,91 +0,0 @@
package common

import (
	"database/sql"
	"log"
	"time"

	qgen "git.tuxpa.in/a/gosora/query_gen"
)

var Analytics AnalyticsStore

type AnalyticsTimeRange struct {
	Quantity   int
	Unit       string
	Slices     int
	SliceWidth int
	Range      string
}

type AnalyticsStore interface {
	FillViewMap(tbl string, tr *AnalyticsTimeRange, labelList []int64, viewMap map[int64]int64, param string, args ...interface{}) (map[int64]int64, error)
}

type DefaultAnalytics struct {
}

func NewDefaultAnalytics() *DefaultAnalytics {
	return &DefaultAnalytics{}
}

/*
	rows, e := qgen.NewAcc().Select("viewchunks_systems").Columns("count,createdAt").Where("system=?").DateCutoff("createdAt", timeRange.Quantity, timeRange.Unit).Query(system)
	if e != nil && e != sql.ErrNoRows {
		return c.InternalError(e, w, r)
	}
	viewMap, e = c.AnalyticsRowsToViewMap(rows, labelList, viewMap)
	if e != nil {
		return c.InternalError(e, w, r)
	}
*/

func (s *DefaultAnalytics) FillViewMap(tbl string, tr *AnalyticsTimeRange, labelList []int64, viewMap map[int64]int64, param string, args ...interface{}) (map[int64]int64, error) {
	ac := qgen.NewAcc().Select(tbl).Columns("count,createdAt")
	if param != "" {
		ac = ac.Where(param + "=?")
	}
	rows, e := ac.DateCutoff("createdAt", tr.Quantity, tr.Unit).Query(args...)
	if e != nil && e != sql.ErrNoRows {
		return nil, e
	}
	return AnalyticsRowsToViewMap(rows, labelList, viewMap)
}

// TODO: Clamp it rather than using an offset off the current time to avoid chaotic changes in stats as adjacent sets converge and diverge?
func AnalyticsTimeRangeToLabelList(tr *AnalyticsTimeRange) (revLabelList []int64, labelList []int64, viewMap map[int64]int64) {
	viewMap = make(map[int64]int64)
	currentTime := time.Now().Unix()
	for i := 1; i <= tr.Slices; i++ {
		label := currentTime - int64(i*tr.SliceWidth)
		revLabelList = append(revLabelList, label)
		viewMap[label] = 0
	}
	labelList = append(labelList, revLabelList...)
	return revLabelList, labelList, viewMap
}

func AnalyticsRowsToViewMap(rows *sql.Rows, labelList []int64, viewMap map[int64]int64) (map[int64]int64, error) {
	defer rows.Close()
	for rows.Next() {
		var count int64
		var createdAt time.Time
		e := rows.Scan(&count, &createdAt)
		if e != nil {
			return viewMap, e
		}
		unixCreatedAt := createdAt.Unix()
		// TODO: Bulk log this
		if Dev.SuperDebug {
			log.Print("count: ", count)
			log.Print("createdAt: ", createdAt, " - ", unixCreatedAt)
		}
		for _, value := range labelList {
			if unixCreatedAt > value {
				viewMap[value] += count
				break
			}
		}
	}
	return viewMap, rows.Err()
}
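A sketch of how these helpers seem intended to compose, assuming the `common` package context: build the label list for a time range, then fill the view map from one of the viewchunks tables. The table name and range values below are made up:

// Sketch only: per-slice view counts for the last day in hourly slices.
func exampleAnalyticsUsage(s AnalyticsStore) (map[int64]int64, error) {
	tr := &AnalyticsTimeRange{Quantity: 1, Unit: "day", Slices: 24, SliceWidth: 60 * 60, Range: "day"}
	_, labelList, viewMap := AnalyticsTimeRangeToLabelList(tr)
	// An empty param means no extra WHERE clause; otherwise e.g. "route" plus a route name argument.
	return s.FillViewMap("viewchunks", tr, labelList, viewMap, "")
}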
@ -2,327 +2,28 @@ package common
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"errors"
|
||||
|
||||
//"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"../query_gen/lib"
|
||||
)
|
||||
|
||||
var Attachments AttachmentStore
|
||||
|
||||
var ErrCorruptAttachPath = errors.New("corrupt attachment path")
|
||||
|
||||
type MiniAttachment struct {
|
||||
ID int
|
||||
SectionID int
|
||||
OriginID int
|
||||
UploadedBy int
|
||||
Path string
|
||||
Extra string
|
||||
|
||||
Image bool
|
||||
Ext string
|
||||
}
|
||||
|
||||
type Attachment struct {
|
||||
ID int
|
||||
SectionTable string
|
||||
SectionID int
|
||||
OriginTable string
|
||||
OriginID int
|
||||
UploadedBy int
|
||||
Path string
|
||||
Extra string
|
||||
|
||||
Image bool
|
||||
Ext string
|
||||
}
|
||||
|
||||
type AttachmentStore interface {
|
||||
GetForRenderRoute(filename string, sid int, sectionTable string) (*Attachment, error)
|
||||
FGet(id int) (*Attachment, error)
|
||||
Get(id int) (*MiniAttachment, error)
|
||||
MiniGetList(originTable string, originID int) (alist []*MiniAttachment, err error)
|
||||
BulkMiniGetList(originTable string, ids []int) (amap map[int][]*MiniAttachment, err error)
|
||||
Add(sectionID int, sectionTable string, originID int, originTable string, uploadedBy int, path, extra string) (int, error)
|
||||
MoveTo(sectionID, originID int, originTable string) error
|
||||
MoveToByExtra(sectionID int, originTable, extra string) error
|
||||
Count() int
|
||||
CountIn(originTable string, oid int) int
|
||||
CountInPath(path string) int
|
||||
Delete(id int) error
|
||||
|
||||
AddLinked(otable string, oid int) (err error)
|
||||
RemoveLinked(otable string, oid int) (err error)
|
||||
Add(sectionID int, sectionTable string, originID int, originTable string, uploadedBy int, path string) error
|
||||
}
|
||||
|
||||
type DefaultAttachmentStore struct {
|
||||
getForRenderRoute *sql.Stmt
|
||||
|
||||
fget *sql.Stmt
|
||||
get *sql.Stmt
|
||||
getByObj *sql.Stmt
|
||||
add *sql.Stmt
|
||||
count *sql.Stmt
|
||||
countIn *sql.Stmt
|
||||
countInPath *sql.Stmt
|
||||
move *sql.Stmt
|
||||
moveByExtra *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
|
||||
replyUpdateAttachs *sql.Stmt
|
||||
topicUpdateAttachs *sql.Stmt
|
||||
add *sql.Stmt
|
||||
}
|
||||
|
||||
func NewDefaultAttachmentStore(acc *qgen.Accumulator) (*DefaultAttachmentStore, error) {
|
||||
a := "attachments"
|
||||
return &DefaultAttachmentStore{
|
||||
getForRenderRoute: acc.Select(a).Columns("sectionTable, originID, originTable, uploadedBy, path").Where("path=? AND sectionID=? AND sectionTable=?").Prepare(),
|
||||
|
||||
fget: acc.Select(a).Columns("originTable, originID, sectionTable, sectionID, uploadedBy, path, extra").Where("attachID=?").Prepare(),
|
||||
get: acc.Select(a).Columns("originID, sectionID, uploadedBy, path, extra").Where("attachID=?").Prepare(),
|
||||
getByObj: acc.Select(a).Columns("attachID, sectionID, uploadedBy, path, extra").Where("originTable=? AND originID=?").Prepare(),
|
||||
add: acc.Insert(a).Columns("sectionID, sectionTable, originID, originTable, uploadedBy, path, extra").Fields("?,?,?,?,?,?,?").Prepare(),
|
||||
count: acc.Count(a).Prepare(),
|
||||
countIn: acc.Count(a).Where("originTable=? and originID=?").Prepare(),
|
||||
countInPath: acc.Count(a).Where("path=?").Prepare(),
|
||||
move: acc.Update(a).Set("sectionID=?").Where("originID=? AND originTable=?").Prepare(),
|
||||
moveByExtra: acc.Update(a).Set("sectionID=?").Where("originTable=? AND extra=?").Prepare(),
|
||||
delete: acc.Delete(a).Where("attachID=?").Prepare(),
|
||||
|
||||
// TODO: Less race-y attachment count updates
|
||||
replyUpdateAttachs: acc.Update("replies").Set("attachCount=?").Where("rid=?").Prepare(),
|
||||
topicUpdateAttachs: acc.Update("topics").Set("attachCount=?").Where("tid=?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
func NewDefaultAttachmentStore() (*DefaultAttachmentStore, error) {
|
||||
acc := qgen.NewAcc()
|
||||
return &DefaultAttachmentStore{
|
||||
add: acc.Insert("attachments").Columns("sectionID, sectionTable, originID, originTable, uploadedBy, path").Fields("?,?,?,?,?,?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
// TODO: Revamp this to make it less of a copy-paste from the original code in the route
|
||||
// ! Lacks some attachment initialisation code
|
||||
func (s *DefaultAttachmentStore) GetForRenderRoute(filename string, sid int, sectionTable string) (*Attachment, error) {
|
||||
a := &Attachment{SectionID: sid}
|
||||
e := s.getForRenderRoute.QueryRow(filename, sid, sectionTable).Scan(&a.SectionTable, &a.OriginID, &a.OriginTable, &a.UploadedBy, &a.Path)
|
||||
// TODO: Initialise attachment struct fields?
|
||||
return a, e
|
||||
}
|
||||
|
||||
func (s *DefaultAttachmentStore) MiniGetList(originTable string, originID int) (alist []*MiniAttachment, err error) {
|
||||
rows, err := s.getByObj.Query(originTable, originID)
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
a := &MiniAttachment{OriginID: originID}
|
||||
err := rows.Scan(&a.ID, &a.SectionID, &a.UploadedBy, &a.Path, &a.Extra)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
a.Ext = strings.TrimPrefix(filepath.Ext(a.Path), ".")
|
||||
if len(a.Ext) == 0 {
|
||||
return nil, ErrCorruptAttachPath
|
||||
}
|
||||
a.Image = ImageFileExts.Contains(a.Ext)
|
||||
alist = append(alist, a)
|
||||
}
|
||||
if err = rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(alist) == 0 {
|
||||
err = sql.ErrNoRows
|
||||
}
|
||||
return alist, err
|
||||
}
|
||||
|
||||
func (s *DefaultAttachmentStore) BulkMiniGetList(originTable string, ids []int) (amap map[int][]*MiniAttachment, err error) {
|
||||
if len(ids) == 0 {
|
||||
return nil, sql.ErrNoRows
|
||||
}
|
||||
if len(ids) == 1 {
|
||||
res, err := s.MiniGetList(originTable, ids[0])
|
||||
return map[int][]*MiniAttachment{ids[0]: res}, err
|
||||
}
|
||||
|
||||
amap = make(map[int][]*MiniAttachment)
|
||||
var buffer []*MiniAttachment
|
||||
var currentID int
|
||||
rows, err := qgen.NewAcc().Select("attachments").Columns("attachID,sectionID,originID,uploadedBy,path").Where("originTable=?").In("originID", ids).Orderby("originID ASC").Query(originTable)
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
a := &MiniAttachment{}
|
||||
err := rows.Scan(&a.ID, &a.SectionID, &a.OriginID, &a.UploadedBy, &a.Path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
a.Ext = strings.TrimPrefix(filepath.Ext(a.Path), ".")
|
||||
if len(a.Ext) == 0 {
|
||||
return nil, ErrCorruptAttachPath
|
||||
}
|
||||
a.Image = ImageFileExts.Contains(a.Ext)
|
||||
if currentID == 0 {
|
||||
currentID = a.OriginID
|
||||
}
|
||||
if a.OriginID != currentID {
|
||||
if len(buffer) > 0 {
|
||||
amap[currentID] = buffer
|
||||
currentID = a.OriginID
|
||||
buffer = nil
|
||||
}
|
||||
}
|
||||
buffer = append(buffer, a)
|
||||
}
|
||||
if len(buffer) > 0 {
|
||||
amap[currentID] = buffer
|
||||
}
|
||||
return amap, rows.Err()
|
||||
}
|
||||
|
||||
func (s *DefaultAttachmentStore) FGet(id int) (*Attachment, error) {
|
||||
a := &Attachment{ID: id}
|
||||
e := s.fget.QueryRow(id).Scan(&a.OriginTable, &a.OriginID, &a.SectionTable, &a.SectionID, &a.UploadedBy, &a.Path, &a.Extra)
|
||||
if e != nil {
|
||||
return nil, e
|
||||
}
|
||||
a.Ext = strings.TrimPrefix(filepath.Ext(a.Path), ".")
|
||||
if len(a.Ext) == 0 {
|
||||
return nil, ErrCorruptAttachPath
|
||||
}
|
||||
a.Image = ImageFileExts.Contains(a.Ext)
|
||||
return a, nil
|
||||
}
|
||||
|
||||
func (s *DefaultAttachmentStore) Get(id int) (*MiniAttachment, error) {
|
||||
a := &MiniAttachment{ID: id}
|
||||
err := s.get.QueryRow(id).Scan(&a.OriginID, &a.SectionID, &a.UploadedBy, &a.Path, &a.Extra)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
a.Ext = strings.TrimPrefix(filepath.Ext(a.Path), ".")
|
||||
if len(a.Ext) == 0 {
|
||||
return nil, ErrCorruptAttachPath
|
||||
}
|
||||
a.Image = ImageFileExts.Contains(a.Ext)
|
||||
return a, nil
|
||||
}
|
||||
|
||||
func (s *DefaultAttachmentStore) Add(sectionID int, sectionTable string, originID int, originTable string, uploadedBy int, path, extra string) (int, error) {
|
||||
res, err := s.add.Exec(sectionID, sectionTable, originID, originTable, uploadedBy, path, extra)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
lid, err := res.LastInsertId()
|
||||
return int(lid), err
|
||||
}
|
||||
|
||||
func (s *DefaultAttachmentStore) MoveTo(sectionID, originID int, originTable string) error {
|
||||
_, err := s.move.Exec(sectionID, originID, originTable)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *DefaultAttachmentStore) MoveToByExtra(sectionID int, originTable, extra string) error {
|
||||
_, err := s.moveByExtra.Exec(sectionID, originTable, extra)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *DefaultAttachmentStore) Count() (count int) {
|
||||
e := s.count.QueryRow().Scan(&count)
|
||||
if e != nil {
|
||||
LogError(e)
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
func (s *DefaultAttachmentStore) CountIn(originTable string, oid int) (count int) {
|
||||
e := s.countIn.QueryRow(originTable, oid).Scan(&count)
|
||||
if e != nil {
|
||||
LogError(e)
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
func (s *DefaultAttachmentStore) CountInPath(path string) (count int) {
|
||||
e := s.countInPath.QueryRow(path).Scan(&count)
|
||||
if e != nil {
|
||||
LogError(e)
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
func (s *DefaultAttachmentStore) Delete(id int) error {
|
||||
_, e := s.delete.Exec(id)
|
||||
return e
|
||||
}
|
||||
|
||||
// TODO: Split this out of this store
|
||||
func (s *DefaultAttachmentStore) AddLinked(otable string, oid int) (err error) {
|
||||
switch otable {
|
||||
case "topics":
|
||||
_, err = s.topicUpdateAttachs.Exec(s.CountIn(otable, oid), oid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = Topics.Reload(oid)
|
||||
case "replies":
|
||||
_, err = s.replyUpdateAttachs.Exec(s.CountIn(otable, oid), oid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = Rstore.GetCache().Remove(oid)
|
||||
}
|
||||
if err == sql.ErrNoRows {
|
||||
err = nil
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO: Split this out of this store
|
||||
func (s *DefaultAttachmentStore) RemoveLinked(otable string, oid int) (err error) {
|
||||
switch otable {
|
||||
case "topics":
|
||||
_, err = s.topicUpdateAttachs.Exec(s.CountIn(otable, oid), oid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if tc := Topics.GetCache(); tc != nil {
|
||||
tc.Remove(oid)
|
||||
}
|
||||
case "replies":
|
||||
_, err = s.replyUpdateAttachs.Exec(s.CountIn(otable, oid), oid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = Rstore.GetCache().Remove(oid)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO: Add a table for the files and lock the file row when performing tasks related to the file
|
||||
func DeleteAttachment(aid int) error {
|
||||
a, err := Attachments.FGet(aid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = deleteAttachment(a)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_ = Attachments.RemoveLinked(a.OriginTable, a.OriginID)
|
||||
return nil
|
||||
}
|
||||
|
||||
func deleteAttachment(a *Attachment) error {
|
||||
err := Attachments.Delete(a.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
count := Attachments.CountInPath(a.Path)
|
||||
if count == 0 {
|
||||
err := os.Remove("./attachs/" + a.Path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
func (store *DefaultAttachmentStore) Add(sectionID int, sectionTable string, originID int, originTable string, uploadedBy int, path string) error {
|
||||
_, err := store.add.Exec(sectionID, sectionTable, originID, originTable, uploadedBy, path)
|
||||
return err
|
||||
}
|
||||
|
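A usage sketch against the newer AttachmentStore interface above (the older branch's Add takes no extra argument and returns only an error); the IDs and path are invented:

// Sketch only: attach an uploaded file to a topic and refresh the cached attachCount.
func exampleAttachmentUsage(topicID, forumID, uploaderID int) error {
	aid, err := Attachments.Add(forumID, "forums", topicID, "topics", uploaderID, "example_upload.png", "")
	if err != nil {
		return err
	}
	_ = aid
	// AddLinked keeps the topic's attachCount column in sync with the attachments table.
	return Attachments.AddLinked("topics", topicID)
}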
@ -2,128 +2,110 @@ package common
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"time"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"../query_gen/lib"
|
||||
)
|
||||
|
||||
var ModLogs LogStore
|
||||
var AdminLogs LogStore
|
||||
|
||||
type LogItem struct {
|
||||
Action string
|
||||
ElementID int
|
||||
ElementType string
|
||||
IP string
|
||||
ActorID int
|
||||
DoneAt string
|
||||
Extra string
|
||||
Action string
|
||||
ElementID int
|
||||
ElementType string
|
||||
IPAddress string
|
||||
ActorID int
|
||||
DoneAt string
|
||||
}
|
||||
|
||||
type LogStore interface {
|
||||
Create(action string, elementID int, elementType, ip string, actorID int) (err error)
|
||||
CreateExtra(action string, elementID int, elementType, ip string, actorID int, extra string) (err error)
|
||||
Count() int
|
||||
GetOffset(offset, perPage int) (logs []LogItem, err error)
|
||||
Create(action string, elementID int, elementType string, ipaddress string, actorID int) (err error)
|
||||
GlobalCount() int
|
||||
GetOffset(offset int, perPage int) (logs []LogItem, err error)
|
||||
}
|
||||
|
||||
type SQLModLogStore struct {
|
||||
create *sql.Stmt
|
||||
count *sql.Stmt
|
||||
getOffset *sql.Stmt
|
||||
create *sql.Stmt
|
||||
count *sql.Stmt
|
||||
getOffset *sql.Stmt
|
||||
}
|
||||
|
||||
func NewModLogStore(acc *qgen.Accumulator) (*SQLModLogStore, error) {
|
||||
ml := "moderation_logs"
|
||||
// TODO: Shorten name of ipaddress column to ip
|
||||
cols := "action, elementID, elementType, ipaddress, actorID, doneAt, extra"
|
||||
return &SQLModLogStore{
|
||||
create: acc.Insert(ml).Columns(cols).Fields("?,?,?,?,?,UTC_TIMESTAMP(),?").Prepare(),
|
||||
count: acc.Count(ml).Prepare(),
|
||||
getOffset: acc.Select(ml).Columns(cols).Orderby("doneAt DESC").Limit("?,?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
return &SQLModLogStore{
|
||||
create: acc.Insert("moderation_logs").Columns("action, elementID, elementType, ipaddress, actorID, doneAt").Fields("?,?,?,?,?,UTC_TIMESTAMP()").Prepare(),
|
||||
count: acc.Count("moderation_logs").Prepare(),
|
||||
getOffset: acc.Select("moderation_logs").Columns("action, elementID, elementType, ipaddress, actorID, doneAt").Orderby("doneAt DESC").Limit("?,?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
// TODO: Make a store for this?
|
||||
func (s *SQLModLogStore) Create(action string, elementID int, elementType, ip string, actorID int) (err error) {
|
||||
return s.CreateExtra(action, elementID, elementType, ip, actorID, "")
|
||||
func (store *SQLModLogStore) Create(action string, elementID int, elementType string, ipaddress string, actorID int) (err error) {
|
||||
_, err = store.create.Exec(action, elementID, elementType, ipaddress, actorID)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *SQLModLogStore) CreateExtra(action string, elementID int, elementType, ip string, actorID int, extra string) (err error) {
|
||||
_, err = s.create.Exec(action, elementID, elementType, ip, actorID, extra)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *SQLModLogStore) Count() (count int) {
|
||||
err := s.count.QueryRow().Scan(&count)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
}
|
||||
return count
|
||||
func (store *SQLModLogStore) GlobalCount() (logCount int) {
|
||||
err := store.count.QueryRow().Scan(&logCount)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
}
|
||||
return logCount
|
||||
}
|
||||
|
||||
func buildLogList(rows *sql.Rows) (logs []LogItem, err error) {
|
||||
for rows.Next() {
|
||||
var l LogItem
|
||||
var doneAt time.Time
|
||||
err := rows.Scan(&l.Action, &l.ElementID, &l.ElementType, &l.IP, &l.ActorID, &doneAt, &l.Extra)
|
||||
if err != nil {
|
||||
return logs, err
|
||||
}
|
||||
l.DoneAt = doneAt.Format("2006-01-02 15:04:05")
|
||||
logs = append(logs, l)
|
||||
}
|
||||
return logs, rows.Err()
|
||||
for rows.Next() {
|
||||
var log LogItem
|
||||
err := rows.Scan(&log.Action, &log.ElementID, &log.ElementType, &log.IPAddress, &log.ActorID, &log.DoneAt)
|
||||
if err != nil {
|
||||
return logs, err
|
||||
}
|
||||
logs = append(logs, log)
|
||||
}
|
||||
return logs, rows.Err()
|
||||
}
|
||||
|
||||
func (s *SQLModLogStore) GetOffset(offset, perPage int) (logs []LogItem, err error) {
|
||||
rows, err := s.getOffset.Query(offset, perPage)
|
||||
if err != nil {
|
||||
return logs, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return buildLogList(rows)
|
||||
func (store *SQLModLogStore) GetOffset(offset int, perPage int) (logs []LogItem, err error) {
|
||||
rows, err := store.getOffset.Query(offset, perPage)
|
||||
if err != nil {
|
||||
return logs, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return buildLogList(rows)
|
||||
}
|
||||
|
||||
type SQLAdminLogStore struct {
|
||||
create *sql.Stmt
|
||||
count *sql.Stmt
|
||||
getOffset *sql.Stmt
|
||||
create *sql.Stmt
|
||||
count *sql.Stmt
|
||||
getOffset *sql.Stmt
|
||||
}
|
||||
|
||||
func NewAdminLogStore(acc *qgen.Accumulator) (*SQLAdminLogStore, error) {
|
||||
al := "administration_logs"
|
||||
cols := "action, elementID, elementType, ipaddress, actorID, doneAt, extra"
|
||||
return &SQLAdminLogStore{
|
||||
create: acc.Insert(al).Columns(cols).Fields("?,?,?,?,?,UTC_TIMESTAMP(),?").Prepare(),
|
||||
count: acc.Count(al).Prepare(),
|
||||
getOffset: acc.Select(al).Columns(cols).Orderby("doneAt DESC").Limit("?,?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
return &SQLAdminLogStore{
|
||||
create: acc.Insert("administration_logs").Columns("action, elementID, elementType, ipaddress, actorID, doneAt").Fields("?,?,?,?,?,UTC_TIMESTAMP()").Prepare(),
|
||||
count: acc.Count("administration_logs").Prepare(),
|
||||
getOffset: acc.Select("administration_logs").Columns("action, elementID, elementType, ipaddress, actorID, doneAt").Orderby("doneAt DESC").Limit("?,?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
// TODO: Make a store for this?
|
||||
func (s *SQLAdminLogStore) Create(action string, elementID int, elementType, ip string, actorID int) (err error) {
|
||||
return s.CreateExtra(action, elementID, elementType, ip, actorID, "")
|
||||
func (store *SQLAdminLogStore) Create(action string, elementID int, elementType string, ipaddress string, actorID int) (err error) {
|
||||
_, err = store.create.Exec(action, elementID, elementType, ipaddress, actorID)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *SQLAdminLogStore) CreateExtra(action string, elementID int, elementType, ip string, actorID int, extra string) (err error) {
|
||||
_, err = s.create.Exec(action, elementID, elementType, ip, actorID, extra)
|
||||
return err
|
||||
func (store *SQLAdminLogStore) GlobalCount() (logCount int) {
|
||||
err := store.count.QueryRow().Scan(&logCount)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
}
|
||||
return logCount
|
||||
}
|
||||
|
||||
func (s *SQLAdminLogStore) Count() (count int) {
|
||||
err := s.count.QueryRow().Scan(&count)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
func (s *SQLAdminLogStore) GetOffset(offset, perPage int) (logs []LogItem, err error) {
|
||||
rows, err := s.getOffset.Query(offset, perPage)
|
||||
if err != nil {
|
||||
return logs, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return buildLogList(rows)
|
||||
func (store *SQLAdminLogStore) GetOffset(offset int, perPage int) (logs []LogItem, err error) {
|
||||
rows, err := store.getOffset.Query(offset, perPage)
|
||||
if err != nil {
|
||||
return logs, err
|
||||
}
|
||||
defer rows.Close()
|
||||
return buildLogList(rows)
|
||||
}
|
||||
|
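A sketch of writing a moderation log entry with the newer LogStore shape shown above (the older branch uses GlobalCount instead of Count and has no CreateExtra); the action names and IDs are invented:

// Sketch only: record that moderator 1 acted on topic 42 from the given IP.
func exampleModLogUsage(ip string) error {
	if err := ModLogs.Create("lock", 42, "topic", ip, 1); err != nil {
		return err
	}
	// CreateExtra allows a free-form note to be stored alongside the entry.
	return ModLogs.CreateExtra("unlock", 42, "topic", ip, 1, "unlocked after review")
}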
478 common/auth.go
@ -1,7 +1,7 @@
|
||||
/*
|
||||
*
|
||||
* Gosora Authentication Interface
|
||||
* Copyright Azareal 2017 - 2020
|
||||
* Copyright Azareal 2017 - 2019
|
||||
*
|
||||
*/
|
||||
package common
|
||||
@ -16,9 +16,8 @@ import (
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"git.tuxpa.in/a/gosora/common/gauth"
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
|
||||
"../query_gen/lib"
|
||||
"./gauth"
|
||||
//"golang.org/x/crypto/argon2"
|
||||
"golang.org/x/crypto/bcrypt"
|
||||
)
|
||||
@ -41,346 +40,343 @@ var ErrPasswordTooLong = errors.New("The password you selected is too long")
|
||||
var ErrWrongPassword = errors.New("That's not the correct password.")
|
||||
var ErrBadMFAToken = errors.New("I'm not sure where you got that from, but that's not a valid 2FA token")
|
||||
var ErrWrongMFAToken = errors.New("That 2FA token isn't correct")
|
||||
var ErrNoMFAToken = errors.New("This user doesn't have 2FA setup")
|
||||
var ErrSecretError = errors.New("There was a glitch in the system. Please contact your local administrator.")
|
||||
var ErrNoUserByName = errors.New("We couldn't find an account with that username.")
|
||||
var DefaultHashAlgo = "bcrypt" // Override this in the configuration file, not here
|
||||
|
||||
//func(realPassword string, password string, salt string) (err error)
|
||||
var CheckPasswordFuncs = map[string]func(string, string, string) error{
|
||||
"bcrypt": BcryptCheckPassword,
|
||||
//"argon2": Argon2CheckPassword,
|
||||
"bcrypt": BcryptCheckPassword,
|
||||
//"argon2": Argon2CheckPassword,
|
||||
}
|
||||
|
||||
//func(password string) (hashedPassword string, salt string, err error)
|
||||
var GeneratePasswordFuncs = map[string]func(string) (string, string, error){
|
||||
"bcrypt": BcryptGeneratePassword,
|
||||
//"argon2": Argon2GeneratePassword,
|
||||
"bcrypt": BcryptGeneratePassword,
|
||||
//"argon2": Argon2GeneratePassword,
|
||||
}
|
||||
|
||||
// TODO: Redirect 2b to bcrypt too?
|
||||
var HashPrefixes = map[string]string{
|
||||
"$2a$": "bcrypt",
|
||||
//"argon2$": "argon2",
|
||||
"$2a$": "bcrypt",
|
||||
//"argon2$": "argon2",
|
||||
}
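The three maps above suggest hashes are dispatched on their prefix: look the algorithm up in HashPrefixes, then pick the matching checker from CheckPasswordFuncs, falling back to DefaultHashAlgo when no prefix matches. That flow isn't spelled out here, so the following is only a sketch of how such a dispatcher might look, not the actual CheckPassword used later in this file:

// Sketch only: prefix-based dispatch over the maps declared above.
func checkPasswordSketch(realPassword, password, salt string) error {
	algo := DefaultHashAlgo
	for prefix, name := range HashPrefixes {
		if strings.HasPrefix(realPassword, prefix) {
			algo = name
			break
		}
	}
	checker, ok := CheckPasswordFuncs[algo]
	if !ok {
		return errors.New("unknown password hashing algorithm")
	}
	return checker(realPassword, password, salt)
}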
|
||||
|
||||
// AuthInt is the main authentication interface.
|
||||
type AuthInt interface {
|
||||
Authenticate(name, password string) (uid int, err error, requiresExtraAuth bool)
|
||||
ValidateMFAToken(mfaToken string, uid int) error
|
||||
Logout(w http.ResponseWriter, uid int)
|
||||
ForceLogout(uid int) error
|
||||
SetCookies(w http.ResponseWriter, uid int, session string)
|
||||
SetProvisionalCookies(w http.ResponseWriter, uid int, session, signedSession string) // To avoid logging someone in until they've passed the MFA check
|
||||
GetCookies(r *http.Request) (uid int, session string, err error)
|
||||
SessionCheck(w http.ResponseWriter, r *http.Request) (u *User, halt bool)
|
||||
CreateSession(uid int) (session string, err error)
|
||||
CreateProvisionalSession(uid int) (provSession, signedSession string, err error) // To avoid logging someone in until they've passed the MFA check
|
||||
Authenticate(username string, password string) (uid int, err error, requiresExtraAuth bool)
|
||||
ValidateMFAToken(mfaToken string, uid int) error
|
||||
Logout(w http.ResponseWriter, uid int)
|
||||
ForceLogout(uid int) error
|
||||
SetCookies(w http.ResponseWriter, uid int, session string)
|
||||
SetProvisionalCookies(w http.ResponseWriter, uid int, session string, signedSession string) // To avoid logging someone in until they've passed the MFA check
|
||||
GetCookies(r *http.Request) (uid int, session string, err error)
|
||||
SessionCheck(w http.ResponseWriter, r *http.Request) (user *User, halt bool)
|
||||
CreateSession(uid int) (session string, err error)
|
||||
CreateProvisionalSession(uid int) (provSession string, signedSession string, err error) // To avoid logging someone in until they've passed the MFA check
|
||||
}
|
||||
|
||||
// DefaultAuth is the default authenticator used by Gosora, may be swapped with an alternate authenticator in some situations. E.g. To support LDAP.
|
||||
type DefaultAuth struct {
|
||||
login *sql.Stmt
|
||||
logout *sql.Stmt
|
||||
updateSession *sql.Stmt
|
||||
login *sql.Stmt
|
||||
logout *sql.Stmt
|
||||
updateSession *sql.Stmt
|
||||
}
|
||||
|
||||
// NewDefaultAuth is a factory for spitting out DefaultAuths
|
||||
func NewDefaultAuth() (*DefaultAuth, error) {
|
||||
acc := qgen.NewAcc()
|
||||
return &DefaultAuth{
|
||||
login: acc.Select("users").Columns("uid, password, salt").Where("name = ?").Prepare(),
|
||||
logout: acc.Update("users").Set("session = ''").Where("uid = ?").Prepare(),
|
||||
updateSession: acc.Update("users").Set("session = ?").Where("uid = ?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
acc := qgen.NewAcc()
|
||||
return &DefaultAuth{
|
||||
login: acc.Select("users").Columns("uid, password, salt").Where("name = ?").Prepare(),
|
||||
logout: acc.Update("users").Set("session = ''").Where("uid = ?").Prepare(),
|
||||
updateSession: acc.Update("users").Set("session = ?").Where("uid = ?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
// Authenticate checks if a specific username and password are valid and, if so, returns the UID for the corresponding user. Otherwise, it returns a user-safe error.
// If MFA is enabled, it also passes back a flag telling the caller that authentication isn't complete yet.
// TODO: Find a better way of handling errors we don't want to reach the user
func (auth *DefaultAuth) Authenticate(name, password string) (uid int, err error, requiresExtraAuth bool) {
	var realPassword, salt string
	err = auth.login.QueryRow(name).Scan(&uid, &realPassword, &salt)
	if err == ErrNoRows {
		return 0, ErrNoUserByName, false
	} else if err != nil {
		LogError(err)
		return 0, ErrSecretError, false
	}

	err = CheckPassword(realPassword, password, salt)
	if err == ErrMismatchedHashAndPassword {
		return 0, ErrWrongPassword, false
	} else if err != nil {
		LogError(err)
		return 0, ErrSecretError, false
	}

	_, err = MFAstore.Get(uid)
	if err != sql.ErrNoRows && err != nil {
		LogError(err)
		return 0, ErrSecretError, false
	}
	if err != ErrNoRows {
		return uid, nil, true
	}

	return uid, nil, false
}

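The requiresExtraAuth flag is what keeps MFA users from being logged straight in. A rough sketch of how a login handler might branch on it, assuming a package-level Auth value of type AuthInt; the handler and the /mfa_verify/ path are illustrative, not the actual routes:

func loginSubmit(w http.ResponseWriter, r *http.Request, name, password string) {
	uid, err, needsMFA := Auth.Authenticate(name, password)
	if err != nil {
		http.Error(w, err.Error(), http.StatusUnauthorized) // Authenticate only returns user-safe errors
		return
	}
	if needsMFA {
		// Don't issue a real session yet; hand out provisional cookies and ask for the TOTP code.
		provSession, signedSession, e := Auth.CreateProvisionalSession(uid)
		if e == nil {
			Auth.SetProvisionalCookies(w, uid, provSession, signedSession)
			http.Redirect(w, r, "/mfa_verify/", http.StatusSeeOther)
		}
		return
	}
	// The password alone was enough: issue the real session.
	session, e := Auth.CreateSession(uid)
	if e == nil {
		Auth.SetCookies(w, uid, session)
		http.Redirect(w, r, "/", http.StatusSeeOther)
	}
}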
func (auth *DefaultAuth) ValidateMFAToken(mfaToken string, uid int) error {
	mfaItem, err := MFAstore.Get(uid)
	if err != sql.ErrNoRows && err != nil {
		LogError(err)
		return ErrSecretError
	}
	if err == ErrNoRows {
		return ErrNoMFAToken
	}

	ok, err := VerifyGAuthToken(mfaItem.Secret, mfaToken)
	if err != nil {
		return ErrBadMFAToken
	}
	if ok {
		return nil
	}

	for i, scratch := range mfaItem.Scratch {
		if subtle.ConstantTimeCompare([]byte(scratch), []byte(mfaToken)) == 1 {
			err = mfaItem.BurnScratch(i)
			if err != nil {
				LogError(err)
				return ErrSecretError
			}
			return nil
		}
	}

	return ErrWrongMFAToken
}

// ForceLogout logs the user out of every computer, not just the one they logged out of
func (auth *DefaultAuth) ForceLogout(uid int) error {
	_, err := auth.logout.Exec(uid)
	if err != nil {
		LogError(err)
		return ErrSecretError
	}

	// Flush the user out of the cache
	if uc := Users.GetCache(); uc != nil {
		uc.Remove(uid)
	}
	return nil
}

func setCookie(w http.ResponseWriter, cookie *http.Cookie, sameSite string) {
	if v := cookie.String(); v != "" {
		switch sameSite {
		case "lax":
			v = v + "; SameSite=lax"
		case "strict":
			v = v + "; SameSite"
		}
		w.Header().Add("Set-Cookie", v)
	}
}

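setCookie only appends a SameSite attribute to whatever net/http serialises, so the effect is easiest to see on the raw header. A standalone check (httptest recorder, not from the codebase):

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

// Same logic as the setCookie helper above, copied here so the snippet runs on its own.
func setCookie(w http.ResponseWriter, cookie *http.Cookie, sameSite string) {
	if v := cookie.String(); v != "" {
		switch sameSite {
		case "lax":
			v = v + "; SameSite=lax"
		case "strict":
			v = v + "; SameSite"
		}
		w.Header().Add("Set-Cookie", v)
	}
}

func main() {
	rec := httptest.NewRecorder()
	setCookie(rec, &http.Cookie{Name: "uid", Value: "1", Path: "/", MaxAge: 3600}, "lax")
	fmt.Println(rec.Header().Get("Set-Cookie"))
	// uid=1; Path=/; Max-Age=3600; SameSite=lax
}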
func deleteCookie(w http.ResponseWriter, cookie *http.Cookie) {
	cookie.MaxAge = -1
	http.SetCookie(w, cookie)
}

// Logout logs you out of the computer you requested the logout for, but not the other computers you're logged in with
func (auth *DefaultAuth) Logout(w http.ResponseWriter, _ int) {
	cookie := http.Cookie{Name: "uid", Value: "", Path: "/"}
	deleteCookie(w, &cookie)
	cookie = http.Cookie{Name: "session", Value: "", Path: "/"}
	deleteCookie(w, &cookie)
}

// TODO: Set the cookie domain
// SetCookies sets the two cookies required for the current user to be recognised as a specific user in future requests
func (auth *DefaultAuth) SetCookies(w http.ResponseWriter, uid int, session string) {
	cookie := http.Cookie{Name: "uid", Value: strconv.Itoa(uid), Path: "/", MaxAge: int(Year)}
	setCookie(w, &cookie, "lax")
	cookie = http.Cookie{Name: "session", Value: session, Path: "/", MaxAge: int(Year)}
	setCookie(w, &cookie, "lax")
}

// TODO: Set the cookie domain
// SetProvisionalCookies sets the cookies required for guests to be recognised as having passed the initial login but not having passed the additional checks (e.g. multi-factor authentication)
func (auth *DefaultAuth) SetProvisionalCookies(w http.ResponseWriter, uid int, provSession, signedSession string) {
	cookie := http.Cookie{Name: "uid", Value: strconv.Itoa(uid), Path: "/", MaxAge: int(Year)}
	setCookie(w, &cookie, "lax")
	cookie = http.Cookie{Name: "provSession", Value: provSession, Path: "/", MaxAge: int(Year)}
	setCookie(w, &cookie, "lax")
	cookie = http.Cookie{Name: "signedSession", Value: signedSession, Path: "/", MaxAge: int(Year)}
	setCookie(w, &cookie, "lax")
}

// GetCookies fetches the current user's session cookies
func (auth *DefaultAuth) GetCookies(r *http.Request) (uid int, session string, err error) {
	// Are there any session cookies..?
	cookie, err := r.Cookie("uid")
	if err != nil {
		return 0, "", err
	}
	uid, err = strconv.Atoi(cookie.Value)
	if err != nil {
		return 0, "", err
	}
	cookie, err = r.Cookie("session")
	if err != nil {
		return 0, "", err
	}
	return uid, cookie.Value, err
}

// SessionCheck checks if a user has session cookies and whether they're valid
func (auth *DefaultAuth) SessionCheck(w http.ResponseWriter, r *http.Request) (user *User, halt bool) {
	uid, session, err := auth.GetCookies(r)
	if err != nil {
		return &GuestUser, false
	}

	// Is this session valid..?
	user, err = Users.Get(uid)
	if err == ErrNoRows {
		return &GuestUser, false
	} else if err != nil {
		InternalError(err, w, r)
		return &GuestUser, true
	}

	// We need to do a constant time compare, otherwise someone might be able to deduce the session character by character based on how long it takes to do the comparison. Change this at your own peril.
	if user.Session == "" || subtle.ConstantTimeCompare([]byte(session), []byte(user.Session)) != 1 {
		return &GuestUser, false
	}

	return user, false
}

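Because SessionCheck degrades to the guest user instead of erroring, a route wrapper only needs to bail out when halt is true. A minimal sketch of that pattern, assuming a package-level Auth value; the wrapper itself is hypothetical:

// withUser resolves the current user before the wrapped route runs.
func withUser(h func(w http.ResponseWriter, r *http.Request, u *User)) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		u, halt := Auth.SessionCheck(w, r)
		if halt {
			return // SessionCheck has already written the internal error page
		}
		h(w, r, u) // u is &GuestUser when there's no valid session
	}
}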
// CreateSession generates a new session to allow a remote client to stay logged in as a specific user
func (auth *DefaultAuth) CreateSession(uid int) (session string, err error) {
	session, err = GenerateSafeString(SessionLength)
	if err != nil {
		return "", err
	}

	_, err = auth.updateSession.Exec(session, uid)
	if err != nil {
		return "", err
	}

	// Flush the user data from the cache
	ucache := Users.GetCache()
	if ucache != nil {
		ucache.Remove(uid)
	}
	return session, nil
}

func (auth *DefaultAuth) CreateProvisionalSession(uid int) (provSession, signedSession string, err error) {
	provSession, err = GenerateSafeString(SessionLength)
	if err != nil {
		return "", "", err
	}

	h := sha256.New()
	h.Write([]byte(SessionSigningKeyBox.Load().(string)))
	h.Write([]byte(provSession))
	h.Write([]byte(strconv.Itoa(uid)))
	return provSession, hex.EncodeToString(h.Sum(nil)), nil
}

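The signed half of the provisional session is just a SHA-256 over the signing key, the session string and the uid, so the MFA verification step can validate it without touching the database. That check isn't part of this hunk; the sketch below recomputes the digest under the assumption that the same inputs are hashed in the same order:

// verifyProvisionalSession is an assumed counterpart to CreateProvisionalSession above.
func verifyProvisionalSession(uid int, provSession, signedSession string) bool {
	h := sha256.New()
	h.Write([]byte(SessionSigningKeyBox.Load().(string)))
	h.Write([]byte(provSession))
	h.Write([]byte(strconv.Itoa(uid)))
	expected := hex.EncodeToString(h.Sum(nil))
	return subtle.ConstantTimeCompare([]byte(expected), []byte(signedSession)) == 1
}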
func CheckPassword(realPassword, password, salt string) (err error) {
	blasted := strings.Split(realPassword, "$")
	prefix := blasted[0]
	if len(blasted) > 1 {
		prefix += "$" + blasted[1] + "$"
	}
	algo, ok := HashPrefixes[prefix]
	if !ok {
		return ErrHashNotExist
	}
	checker := CheckPasswordFuncs[algo]
	return checker(realPassword, password, salt)
}

func GeneratePassword(password string) (hash, salt string, err error) {
	gen, ok := GeneratePasswordFuncs[DefaultHashAlgo]
	if !ok {
		return "", "", ErrHashNotExist
	}
	return gen(password)
}

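GeneratePassword and CheckPassword are a matched pair: the generator picks the algorithm via DefaultHashAlgo, while the checker re-derives it from the stored hash's prefix through HashPrefixes. A small round-trip sketch under that assumption:

func hashAndCheck(plaintext string) error {
	hash, salt, err := GeneratePassword(plaintext) // e.g. a bcrypt hash carrying the "$2a$" prefix
	if err != nil {
		return err
	}
	// Later, at login time, the prefix of the stored hash selects the right checker.
	return CheckPassword(hash, plaintext, salt) // nil on a match, ErrMismatchedHashAndPassword otherwise
}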
func BcryptCheckPassword(realPassword, password, salt string) (err error) {
	return bcrypt.CompareHashAndPassword([]byte(realPassword), []byte(password+salt))
}

// Note: The salt is in the hash, therefore the salt parameter is blank
func BcryptGeneratePassword(password string) (hash, salt string, err error) {
	hashedPassword, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)
	if err != nil {
		return "", "", err
	}
	return string(hashedPassword), salt, nil
}

/*const (
	argon2Time    uint32 = 3
	argon2Memory  uint32 = 32 * 1024
	argon2Threads uint8  = 4
	argon2KeyLen  uint32 = 32
)

func Argon2CheckPassword(realPassword, password, salt string) (err error) {
	split := strings.Split(realPassword, "$")
	// TODO: Better validation
	if len(split) < 5 {
		return ErrTooFewHashParams
	}
	realKey, _ := base64.StdEncoding.DecodeString(split[len(split)-1])
	time, _ := strconv.Atoi(split[1])
	memory, _ := strconv.Atoi(split[2])
	threads, _ := strconv.Atoi(split[3])
	keyLen, _ := strconv.Atoi(split[4])
	key := argon2.Key([]byte(password), []byte(salt), uint32(time), uint32(memory), uint8(threads), uint32(keyLen))
	if subtle.ConstantTimeCompare(realKey, key) != 1 {
		return ErrMismatchedHashAndPassword
	}
	return nil
}

func Argon2GeneratePassword(password string) (hash, salt string, err error) {
	sbytes := make([]byte, SaltLength)
	_, err = rand.Read(sbytes)
	if err != nil {
		return "", "", err
	}
	key := argon2.Key([]byte(password), sbytes, argon2Time, argon2Memory, argon2Threads, argon2KeyLen)
	hash = base64.StdEncoding.EncodeToString(key)
	return fmt.Sprintf("argon2$%d%d%d%d%s%s", argon2Time, argon2Memory, argon2Threads, argon2KeyLen, salt, hash), string(sbytes), nil
}
*/

// TODO: Test this with Google Authenticator proper
func FriendlyGAuthSecret(secret string) (out string) {
	for i, char := range secret {
		out += string(char)
		if (i+1)%4 == 0 {
			out += " "
		}
	}
	return strings.TrimSpace(out)
}
func GenerateGAuthSecret() (string, error) {
	return GenerateStd32SafeString(14)
}
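FriendlyGAuthSecret only regroups the base32 secret into blocks of four for manual entry. A standalone check of what it produces:

package main

import (
	"fmt"
	"strings"
)

// Copied from the helper above so the snippet runs on its own.
func FriendlyGAuthSecret(secret string) (out string) {
	for i, char := range secret {
		out += string(char)
		if (i+1)%4 == 0 {
			out += " "
		}
	}
	return strings.TrimSpace(out)
}

func main() {
	fmt.Println(FriendlyGAuthSecret("JBSWY3DPEHPK3PXP"))
	// JBSW Y3DP EHPK 3PXP
}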
func VerifyGAuthToken(secret, token string) (bool, error) {
	trueToken, err := gauth.GetTOTPToken(secret)
	return subtle.ConstantTimeCompare([]byte(trueToken), []byte(token)) == 1, err
}

@ -11,24 +11,24 @@ var ErrStoreCapacityOverflow = errors.New("This datastore has reached it's maximum capacity")

// nolint
type DataStore interface {
	DirtyGet(id int) interface{}
	Get(id int) (interface{}, error)
	BypassGet(id int) (interface{}, error)
	//Count() int
	//GlobalCount()
}

// nolint
type DataCache interface {
	CacheGet(id int) (interface{}, error)
	CacheGetUnsafe(id int) (interface{}, error)
	CacheSet(item interface{}) error
	CacheAdd(item interface{}) error
	CacheAddUnsafe(item interface{}) error
	CacheRemove(id int) error
	CacheRemoveUnsafe(id int) error
	Reload(id int) error
	Flush()
	Length() int
	SetCapacity(capacity int)
	GetCapacity() int
}

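DataCache is the optional layer a store hands back from its GetCache() method, which is why code like ForceLogout above null-checks it before evicting a row. A sketch of the same pattern against the generic interface; the evict helper is illustrative:

// evict drops a stale item from whichever cache, if any, backs a store.
func evict(cache DataCache, id int) {
	if cache == nil {
		return // the store is running uncached
	}
	_ = cache.CacheRemove(id) // a miss just means there was nothing to evict
}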
316 common/common.go
@ -1,70 +1,42 @@
/*
*
* Gosora Common Resources
* Copyright Azareal 2018 - 2020
*
*/
package common // import "git.tuxpa.in/a/gosora/common"
package common

import (
	"database/sql"
	"io"
	"log"
	"net"
	"net/http"
	"os"
	"runtime/debug"
	"strconv"
	"strings"
	"sync/atomic"
	"time"

	meta "git.tuxpa.in/a/gosora/common/meta"
	qgen "git.tuxpa.in/a/gosora/query_gen"
	"../query_gen/lib"
)

var SoftwareVersion = Version{Major: 0, Minor: 3, Patch: 0, Tag: "dev"}

var Meta meta.MetaStore

// nolint I don't want to write comments for each of these o.o
const Hour int = 60 * 60
const Day = Hour * 24
const Week = Day * 7
const Month = Day * 30
const Year = Day * 365
const Day int = Hour * 24
const Week int = Day * 7
const Month int = Day * 30
const Year int = Day * 365
const Kilobyte int = 1024
const Megabyte = Kilobyte * 1024
const Gigabyte = Megabyte * 1024
const Terabyte = Gigabyte * 1024
const Petabyte = Terabyte * 1024
const Megabyte int = Kilobyte * 1024
const Gigabyte int = Megabyte * 1024
const Terabyte int = Gigabyte * 1024
const Petabyte int = Terabyte * 1024

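These duration constants are plain seconds, which is why SetCookies earlier passes int(Year) straight into http.Cookie.MaxAge; Year works out to 60*60*24*365 = 31,536,000 seconds. For example:

cookie := http.Cookie{Name: "session", Value: session, Path: "/", MaxAge: int(Year)} // expires in roughly 365 days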
var StartTime time.Time
var GzipStartEtag string
var StartEtag string
var TmplPtrMap = make(map[string]interface{})

// Anti-spam token with rotated key
var JSTokenBox atomic.Value              // TODO: Move this and some of these other globals somewhere else
var SessionSigningKeyBox atomic.Value    // For MFA to avoid hitting the database unnecessarily
var OldSessionSigningKeyBox atomic.Value // Just in case we've signed with a key that's about to go stale so we don't annoy the user too much
var IsDBDown int32 = 0                   // 0 = false, 1 = true. This value should be manipulated with package atomic to track whether the database is down, so we don't spam the log with lots of redundant errors

// ErrNoRows is an alias of sql.ErrNoRows, just in case we end up with non-database/sql datastores
var ErrNoRows = sql.ErrNoRows

//var StrSlicePool sync.Pool

// ? - Make this more customisable?
/*var ExternalSites = map[string]string{
	"YT": "https://www.youtube.com/",
}*/

// TODO: Make this more customisable
var SpammyDomainBits = []string{"porn", "sex", "acup", "nude", "milf", "tits", "vape", "busty", "kink", "lingerie", "strapon", "problog", "fet", "xblog", "blogin", "blognetwork", "relayblog"}

var Chrome, Firefox int // ! Temporary hack for http push
var SimpleBots []int    // ! Temporary hack to stop semrush, ahrefs, python bots and others from wasting resources
var ExternalSites = map[string]string{
	"YT": "https://www.youtube.com/",
}

type StringList []string

@ -72,266 +44,86 @@ type StringList []string

// TODO: Let admins manage this from the Control Panel
// apng is commented out for now, as we have no way of re-encoding it into a smaller file
var AllowedFileExts = StringList{
	"png", "jpg", "jpe", "jpeg", "jif", "jfi", "jfif", "svg", "bmp", "gif", "tiff", "tif", "webp", "apng", "avif", "flif", "heif", "heic", "bpg", // images (encodable) + apng (browser support) + bpg + avif + flif + heif / heic
	"png", "jpg", "jpeg", "svg", "bmp", "gif", "tif", "webp", /*"apng",*/ // images

	"txt", "xml", "json", "yaml", "toml", "ini", "md", "html", "rtf", "js", "py", "rb", "css", "scss", "less", "eqcss", "pcss", "java", "ts", "cs", "c", "cc", "cpp", "cxx", "C", "c++", "h", "hh", "hpp", "hxx", "h++", "rs", "rlib", "htaccess", "gitignore", /*"go","php",*/ // text

	"wav", "mp3", "oga", "m4a", "flac", "ac3", "aac", "opus", // audio
	"mp3", "mp4", "avi", "wmv", "webm", // video

	"mp4", "avi", "ogg", "ogv", "ogx", "wmv", "webm", "flv", "f4v", "xvid", "mov", "movie", "qt", // video

	"otf", "woff2", "woff", "ttf", "eot", // fonts

	"bz2", "zip", "zipx", "gz", "7z", "tar", "cab", "rar", "kgb", "pea", "xz", "zz", "tgz", "xpi", // archives

	"docx", "pdf", // documents
	"otf", "woff2", "woff", "ttf", "eot", // fonts
}
var ImageFileExts = StringList{
	"png", "jpg", "jpe", "jpeg", "jif", "jfi", "jfif", "svg", "bmp", "gif", "tiff", "tif", "webp", /* "apng", "bpg", "avif", */
}
var TextFileExts = StringList{
	"txt", "xml", "json", "yaml", "toml", "ini", "md", "html", "rtf", "js", "py", "rb", "css", "scss", "less", "eqcss", "pcss", "java", "ts", "cs", "c", "cc", "cpp", "cxx", "C", "c++", "h", "hh", "hpp", "hxx", "h++", "rs", "rlib", "htaccess", "gitignore", /*"go","php",*/
}
var VideoFileExts = StringList{
	"mp4", "avi", "ogg", "ogv", "ogx", "wmv", "webm", "flv", "f4v", "xvid", "mov", "movie", "qt",
}
var WebVideoFileExts = StringList{
	"mp4", "avi", "ogg", "ogv", "webm",
}
var WebAudioFileExts = StringList{
	"wav", "mp3", "oga", "m4a", "flac",
	"png", "jpg", "jpeg", "svg", "bmp", "gif", "tif", "webp", /* "apng",*/
}
var ArchiveFileExts = StringList{
	"bz2", "zip", "zipx", "gz", "7z", "tar", "cab", "rar", "kgb", "pea", "xz", "zz", "tgz", "xpi",
	"bz2", "zip", "gz", "7z", "tar", "cab",
}
var ExecutableFileExts = StringList{
	"exe", "jar", "phar", "shar", "iso", "apk", "deb",
	"exe", "jar", "phar", "shar", "iso",
}

func init() {
	JSTokenBox.Store("")
	SessionSigningKeyBox.Store("")
	OldSessionSigningKeyBox.Store("")
}

// TODO: Write a test for this
func (sl StringList) Contains(needle string) bool {
	for _, it := range sl {
		if it == needle {
			return true
		}
	}
	return false
}

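Contains is what the extension whitelists above get checked against. A standalone sketch of the kind of lookup it enables; the cut-down list stands in for ImageFileExts and the filename is made up:

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

type StringList []string

func (sl StringList) Contains(needle string) bool {
	for _, it := range sl {
		if it == needle {
			return true
		}
	}
	return false
}

func main() {
	imageExts := StringList{"png", "jpg", "jpeg", "gif", "webp"} // stand-in for ImageFileExts
	ext := strings.TrimPrefix(filepath.Ext("cat.png"), ".")      // "cat.png" -> "png"
	fmt.Println(imageExts.Contains(ext))                         // true
}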
/*var DbTables []string
var TableToID = make(map[string]int)
var IDToTable = make(map[int]string)

func InitTables(acc *qgen.Accumulator) error {
	stmt := acc.Select("tables").Columns("id,name").Prepare()
	if e := acc.FirstError(); e != nil {
		return e
	}
	return eachall(stmt, func(r *sql.Rows) error {
		var id int
		var name string
		if e := r.Scan(&id, &name); e != nil {
			return e
		}
		TableToID[name] = id
		IDToTable[id] = name
		return nil
	})
}*/

type dbInits []func(acc *qgen.Accumulator) error

var DbInits dbInits

func (inits dbInits) Run() error {
	for _, i := range inits {
		if e := i(qgen.NewAcc()); e != nil {
			return e
		}
	}
	return nil
}

func (inits dbInits) Add(i ...func(acc *qgen.Accumulator) error) {
	DbInits = dbInits(append(DbInits, i...))
}

// TODO: Add a graceful shutdown function
func StoppedServer(msg ...interface{}) {
	//log.Print("stopped server")
	StopServerChan <- msg
}

var StopServerChan = make(chan []interface{})

var LogWriter = io.MultiWriter(os.Stdout)
var ErrLogWriter = io.MultiWriter(os.Stderr)
var ErrLogger = log.New(os.Stderr, "", log.LstdFlags)

func DebugDetail(args ...interface{}) {
	if Dev.SuperDebug {
		log.Print(args...)
	}
}

func DebugDetailf(str string, args ...interface{}) {
	if Dev.SuperDebug {
		log.Printf(str, args...)
	}
}

func DebugLog(args ...interface{}) {
	if Dev.DebugMode {
		log.Print(args...)
	}
}

func DebugLogf(str string, args ...interface{}) {
	if Dev.DebugMode {
		log.Printf(str, args...)
	}
}

func Log(args ...interface{}) {
	log.Print(args...)
}
func Logf(str string, args ...interface{}) {
	log.Printf(str, args...)
}
func Err(args ...interface{}) {
	ErrLogger.Print(args...)
}

func Count(stmt *sql.Stmt) (count int) {
	e := stmt.QueryRow().Scan(&count)
	if e != nil {
		LogError(e)
	}
	return count
}
func Countf(stmt *sql.Stmt, args ...interface{}) (count int) {
	e := stmt.QueryRow(args...).Scan(&count)
	if e != nil {
		LogError(e)
	}
	return count
}
func Createf(stmt *sql.Stmt, args ...interface{}) (id int, e error) {
	res, e := stmt.Exec(args...)
	if e != nil {
		return 0, e
	}
	id64, e := res.LastInsertId()
	return int(id64), e
}

func eachall(stmt *sql.Stmt, f func(r *sql.Rows) error) error {
	rows, e := stmt.Query()
	if e != nil {
		return e
	}
	defer rows.Close()
	for rows.Next() {
		if e := f(rows); e != nil {
			return e
		}
	}
	return rows.Err()
}

var qcache = []string{0: "?", 1: "?,?", 2: "?,?,?", 3: "?,?,?,?", 4: "?,?,?,?,?", 5: "?,?,?,?,?,?", 6: "?,?,?,?,?,?,?", 7: "?,?,?,?,?,?,?,?", 8: "?,?,?,?,?,?,?,?,?"}

func inqbuild(ids []int) ([]interface{}, string) {
	if len(ids) < 8 {
		idList := make([]interface{}, len(ids))
		for i, id := range ids {
			idList[i] = strconv.Itoa(id)
		}
		return idList, qcache[len(ids)-1]
	}

	var sb strings.Builder
	sb.Grow((len(ids) * 2) - 1)
	idList := make([]interface{}, len(ids))
	for i, id := range ids {
		idList[i] = strconv.Itoa(id)
		if i == 0 {
			sb.WriteRune('?')
		} else {
			sb.WriteString(",?")
		}
	}
	return idList, sb.String()
}

func inqbuild2(count int) string {
	if count <= 8 {
		return qcache[count-1]
	}
	var sb strings.Builder
	sb.Grow((count * 2) - 1)
	for i := 0; i < count; i++ {
		if i == 0 {
			sb.WriteRune('?')
		} else {
			sb.WriteString(",?")
		}
	}
	return sb.String()
}

func inqbuildstr(strs []string) ([]interface{}, string) {
	if len(strs) < 8 {
		idList := make([]interface{}, len(strs))
		for i, id := range strs {
			idList[i] = id
		}
		return idList, qcache[len(strs)-1]
	}

	var sb strings.Builder
	sb.Grow((len(strs) * 2) - 1)
	idList := make([]interface{}, len(strs))
	for i, id := range strs {
		idList[i] = id
		if i == 0 {
			sb.WriteRune('?')
		} else {
			sb.WriteString(",?")
		}
	}
	return idList, sb.String()
}

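inqbuild and friends build the placeholder list for SQL IN(...) clauses, with qcache short-circuiting the small cases. For three ids the cached "?,?,?" string comes back alongside the stringified arguments, ready to splice into a query the way the participants lookup elsewhere in this changeset does:

args, q := inqbuild([]int{4, 8, 15})
// args == []interface{}{"4", "8", "15"}, q == "?,?,?"
rows, err := qgen.NewAcc().Select("conversations_participants").Columns("uid,cid").
	Where("cid IN(" + q + ")").Query(args...)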
var ConnWatch = &ConnWatcher{}

type ConnWatcher struct {
	n int64
}

func (cw *ConnWatcher) StateChange(conn net.Conn, state http.ConnState) {
	switch state {
	case http.StateNew:
		atomic.AddInt64(&cw.n, 1)
	case http.StateHijacked, http.StateClosed:
		atomic.AddInt64(&cw.n, -1)
	}
}

func (cw *ConnWatcher) Count() int {
	return int(atomic.LoadInt64(&cw.n))
}

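ConnWatcher is shaped to slot into net/http's ConnState hook, which is presumably how the open-connection count stays current. A minimal wiring sketch; the server values are illustrative:

srv := &http.Server{
	Addr:      ":8080",
	ConnState: ConnWatch.StateChange, // net/http invokes this on every connection state change
}
log.Printf("open connections: %d", ConnWatch.Count())
if err := srv.ListenAndServe(); err != nil {
	log.Fatal(err)
}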
func EatPanics() {
	if r := recover(); r != nil {
		log.Print(r)
		debug.PrintStack()
		log.Fatal("Fatal error.")
	}
}
|
||||
|
@ -1,628 +0,0 @@
|
||||
// Code generated by easyjson for marshaling/unmarshaling. DO NOT EDIT.
|
||||
|
||||
package common
|
||||
|
||||
import (
|
||||
json "encoding/json"
|
||||
easyjson "github.com/mailru/easyjson"
|
||||
jlexer "github.com/mailru/easyjson/jlexer"
|
||||
jwriter "github.com/mailru/easyjson/jwriter"
|
||||
)
|
||||
|
||||
// suppress unused package warning
|
||||
var (
|
||||
_ *json.RawMessage
|
||||
_ *jlexer.Lexer
|
||||
_ *jwriter.Writer
|
||||
_ easyjson.Marshaler
|
||||
)
|
||||
|
||||
func easyjsonC803d3e7DecodeGithubComAzarealGosoraCommon(in *jlexer.Lexer, out *WsTopicList) {
|
||||
isTopLevel := in.IsStart()
|
||||
if in.IsNull() {
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
in.Skip()
|
||||
return
|
||||
}
|
||||
in.Delim('{')
|
||||
for !in.IsDelim('}') {
|
||||
key := in.UnsafeString()
|
||||
in.WantColon()
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
in.WantComma()
|
||||
continue
|
||||
}
|
||||
switch key {
|
||||
case "Topics":
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
out.Topics = nil
|
||||
} else {
|
||||
in.Delim('[')
|
||||
if out.Topics == nil {
|
||||
if !in.IsDelim(']') {
|
||||
out.Topics = make([]*WsTopicsRow, 0, 8)
|
||||
} else {
|
||||
out.Topics = []*WsTopicsRow{}
|
||||
}
|
||||
} else {
|
||||
out.Topics = (out.Topics)[:0]
|
||||
}
|
||||
for !in.IsDelim(']') {
|
||||
var v1 *WsTopicsRow
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
v1 = nil
|
||||
} else {
|
||||
if v1 == nil {
|
||||
v1 = new(WsTopicsRow)
|
||||
}
|
||||
easyjsonC803d3e7DecodeGithubComAzarealGosoraCommon1(in, v1)
|
||||
}
|
||||
out.Topics = append(out.Topics, v1)
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim(']')
|
||||
}
|
||||
case "LastPage":
|
||||
out.LastPage = int(in.Int())
|
||||
case "LastUpdate":
|
||||
out.LastUpdate = int64(in.Int64())
|
||||
default:
|
||||
in.SkipRecursive()
|
||||
}
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim('}')
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
}
|
||||
func easyjsonC803d3e7EncodeGithubComAzarealGosoraCommon(out *jwriter.Writer, in WsTopicList) {
|
||||
out.RawByte('{')
|
||||
first := true
|
||||
_ = first
|
||||
{
|
||||
const prefix string = ",\"Topics\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
if in.Topics == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 {
|
||||
out.RawString("null")
|
||||
} else {
|
||||
out.RawByte('[')
|
||||
for v2, v3 := range in.Topics {
|
||||
if v2 > 0 {
|
||||
out.RawByte(',')
|
||||
}
|
||||
if v3 == nil {
|
||||
out.RawString("null")
|
||||
} else {
|
||||
easyjsonC803d3e7EncodeGithubComAzarealGosoraCommon1(out, *v3)
|
||||
}
|
||||
}
|
||||
out.RawByte(']')
|
||||
}
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"LastPage\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.LastPage))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"LastUpdate\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int64(int64(in.LastUpdate))
|
||||
}
|
||||
out.RawByte('}')
|
||||
}
|
||||
|
||||
// MarshalJSON supports json.Marshaler interface
|
||||
func (v WsTopicList) MarshalJSON() ([]byte, error) {
|
||||
w := jwriter.Writer{}
|
||||
easyjsonC803d3e7EncodeGithubComAzarealGosoraCommon(&w, v)
|
||||
return w.Buffer.BuildBytes(), w.Error
|
||||
}
|
||||
|
||||
// MarshalEasyJSON supports easyjson.Marshaler interface
|
||||
func (v WsTopicList) MarshalEasyJSON(w *jwriter.Writer) {
|
||||
easyjsonC803d3e7EncodeGithubComAzarealGosoraCommon(w, v)
|
||||
}
|
||||
|
||||
// UnmarshalJSON supports json.Unmarshaler interface
|
||||
func (v *WsTopicList) UnmarshalJSON(data []byte) error {
|
||||
r := jlexer.Lexer{Data: data}
|
||||
easyjsonC803d3e7DecodeGithubComAzarealGosoraCommon(&r, v)
|
||||
return r.Error()
|
||||
}
|
||||
|
||||
// UnmarshalEasyJSON supports easyjson.Unmarshaler interface
|
||||
func (v *WsTopicList) UnmarshalEasyJSON(l *jlexer.Lexer) {
|
||||
easyjsonC803d3e7DecodeGithubComAzarealGosoraCommon(l, v)
|
||||
}
|
||||
func easyjsonC803d3e7DecodeGithubComAzarealGosoraCommon1(in *jlexer.Lexer, out *WsTopicsRow) {
|
||||
isTopLevel := in.IsStart()
|
||||
if in.IsNull() {
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
in.Skip()
|
||||
return
|
||||
}
|
||||
in.Delim('{')
|
||||
for !in.IsDelim('}') {
|
||||
key := in.UnsafeString()
|
||||
in.WantColon()
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
in.WantComma()
|
||||
continue
|
||||
}
|
||||
switch key {
|
||||
case "ID":
|
||||
out.ID = int(in.Int())
|
||||
case "Link":
|
||||
out.Link = string(in.String())
|
||||
case "Title":
|
||||
out.Title = string(in.String())
|
||||
case "CreatedBy":
|
||||
out.CreatedBy = int(in.Int())
|
||||
case "IsClosed":
|
||||
out.IsClosed = bool(in.Bool())
|
||||
case "Sticky":
|
||||
out.Sticky = bool(in.Bool())
|
||||
case "CreatedAt":
|
||||
if data := in.Raw(); in.Ok() {
|
||||
in.AddError((out.CreatedAt).UnmarshalJSON(data))
|
||||
}
|
||||
case "LastReplyAt":
|
||||
if data := in.Raw(); in.Ok() {
|
||||
in.AddError((out.LastReplyAt).UnmarshalJSON(data))
|
||||
}
|
||||
case "RelativeLastReplyAt":
|
||||
out.RelativeLastReplyAt = string(in.String())
|
||||
case "LastReplyBy":
|
||||
out.LastReplyBy = int(in.Int())
|
||||
case "LastReplyID":
|
||||
out.LastReplyID = int(in.Int())
|
||||
case "ParentID":
|
||||
out.ParentID = int(in.Int())
|
||||
case "ViewCount":
|
||||
out.ViewCount = int64(in.Int64())
|
||||
case "PostCount":
|
||||
out.PostCount = int(in.Int())
|
||||
case "LikeCount":
|
||||
out.LikeCount = int(in.Int())
|
||||
case "AttachCount":
|
||||
out.AttachCount = int(in.Int())
|
||||
case "ClassName":
|
||||
out.ClassName = string(in.String())
|
||||
case "Creator":
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
out.Creator = nil
|
||||
} else {
|
||||
if out.Creator == nil {
|
||||
out.Creator = new(WsJSONUser)
|
||||
}
|
||||
easyjsonC803d3e7DecodeGithubComAzarealGosoraCommon2(in, out.Creator)
|
||||
}
|
||||
case "LastUser":
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
out.LastUser = nil
|
||||
} else {
|
||||
if out.LastUser == nil {
|
||||
out.LastUser = new(WsJSONUser)
|
||||
}
|
||||
easyjsonC803d3e7DecodeGithubComAzarealGosoraCommon2(in, out.LastUser)
|
||||
}
|
||||
case "ForumName":
|
||||
out.ForumName = string(in.String())
|
||||
case "ForumLink":
|
||||
out.ForumLink = string(in.String())
|
||||
default:
|
||||
in.SkipRecursive()
|
||||
}
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim('}')
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
}
|
||||
func easyjsonC803d3e7EncodeGithubComAzarealGosoraCommon1(out *jwriter.Writer, in WsTopicsRow) {
|
||||
out.RawByte('{')
|
||||
first := true
|
||||
_ = first
|
||||
{
|
||||
const prefix string = ",\"ID\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.ID))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Link\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.Link))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Title\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.Title))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"CreatedBy\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.CreatedBy))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"IsClosed\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Bool(bool(in.IsClosed))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Sticky\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Bool(bool(in.Sticky))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"CreatedAt\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Raw((in.CreatedAt).MarshalJSON())
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"LastReplyAt\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Raw((in.LastReplyAt).MarshalJSON())
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"RelativeLastReplyAt\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.RelativeLastReplyAt))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"LastReplyBy\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.LastReplyBy))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"LastReplyID\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.LastReplyID))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"ParentID\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.ParentID))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"ViewCount\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int64(int64(in.ViewCount))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"PostCount\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.PostCount))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"LikeCount\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.LikeCount))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"AttachCount\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.AttachCount))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"ClassName\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.ClassName))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Creator\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
if in.Creator == nil {
|
||||
out.RawString("null")
|
||||
} else {
|
||||
easyjsonC803d3e7EncodeGithubComAzarealGosoraCommon2(out, *in.Creator)
|
||||
}
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"LastUser\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
if in.LastUser == nil {
|
||||
out.RawString("null")
|
||||
} else {
|
||||
easyjsonC803d3e7EncodeGithubComAzarealGosoraCommon2(out, *in.LastUser)
|
||||
}
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"ForumName\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.ForumName))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"ForumLink\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.ForumLink))
|
||||
}
|
||||
out.RawByte('}')
|
||||
}
|
||||
func easyjsonC803d3e7DecodeGithubComAzarealGosoraCommon2(in *jlexer.Lexer, out *WsJSONUser) {
|
||||
isTopLevel := in.IsStart()
|
||||
if in.IsNull() {
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
in.Skip()
|
||||
return
|
||||
}
|
||||
in.Delim('{')
|
||||
for !in.IsDelim('}') {
|
||||
key := in.UnsafeString()
|
||||
in.WantColon()
|
||||
if in.IsNull() {
|
||||
in.Skip()
|
||||
in.WantComma()
|
||||
continue
|
||||
}
|
||||
switch key {
|
||||
case "ID":
|
||||
out.ID = int(in.Int())
|
||||
case "Link":
|
||||
out.Link = string(in.String())
|
||||
case "Name":
|
||||
out.Name = string(in.String())
|
||||
case "Group":
|
||||
out.Group = int(in.Int())
|
||||
case "IsMod":
|
||||
out.IsMod = bool(in.Bool())
|
||||
case "Avatar":
|
||||
out.Avatar = string(in.String())
|
||||
case "MicroAvatar":
|
||||
out.MicroAvatar = string(in.String())
|
||||
case "Level":
|
||||
out.Level = int(in.Int())
|
||||
case "Score":
|
||||
out.Score = int(in.Int())
|
||||
case "Liked":
|
||||
out.Liked = int(in.Int())
|
||||
default:
|
||||
in.SkipRecursive()
|
||||
}
|
||||
in.WantComma()
|
||||
}
|
||||
in.Delim('}')
|
||||
if isTopLevel {
|
||||
in.Consumed()
|
||||
}
|
||||
}
|
||||
func easyjsonC803d3e7EncodeGithubComAzarealGosoraCommon2(out *jwriter.Writer, in WsJSONUser) {
|
||||
out.RawByte('{')
|
||||
first := true
|
||||
_ = first
|
||||
{
|
||||
const prefix string = ",\"ID\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.ID))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Link\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.Link))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Name\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.Name))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Group\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.Group))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"IsMod\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Bool(bool(in.IsMod))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Avatar\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.Avatar))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"MicroAvatar\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.String(string(in.MicroAvatar))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Level\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.Level))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Score\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.Score))
|
||||
}
|
||||
{
|
||||
const prefix string = ",\"Liked\":"
|
||||
if first {
|
||||
first = false
|
||||
out.RawString(prefix[1:])
|
||||
} else {
|
||||
out.RawString(prefix)
|
||||
}
|
||||
out.Int(int(in.Liked))
|
||||
}
|
||||
out.RawByte('}')
|
||||
}
|
@ -1,371 +0,0 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"time"
|
||||
|
||||
//"log"
|
||||
|
||||
"database/sql"
|
||||
"strconv"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
)
|
||||
|
||||
var Convos ConversationStore
|
||||
var convoStmts ConvoStmts
|
||||
|
||||
type ConvoStmts struct {
|
||||
fetchPost *sql.Stmt
|
||||
getPosts *sql.Stmt
|
||||
countPosts *sql.Stmt
|
||||
edit *sql.Stmt
|
||||
create *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
has *sql.Stmt
|
||||
|
||||
editPost *sql.Stmt
|
||||
createPost *sql.Stmt
|
||||
deletePost *sql.Stmt
|
||||
|
||||
getUsers *sql.Stmt
|
||||
}
|
||||
|
||||
func init() {
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
cpo := "conversations_posts"
|
||||
convoStmts = ConvoStmts{
|
||||
fetchPost: acc.Select(cpo).Columns("cid,body,post,createdBy").Where("pid=?").Prepare(),
|
||||
getPosts: acc.Select(cpo).Columns("pid,body,post,createdBy").Where("cid=?").Limit("?,?").Prepare(),
|
||||
countPosts: acc.Count(cpo).Where("cid=?").Prepare(),
|
||||
edit: acc.Update("conversations").Set("lastReplyBy=?,lastReplyAt=?").Where("cid=?").Prepare(),
|
||||
create: acc.Insert("conversations").Columns("createdAt,lastReplyAt").Fields("UTC_TIMESTAMP(),UTC_TIMESTAMP()").Prepare(),
|
||||
has: acc.Count("conversations_participants").Where("uid=? AND cid=?").Prepare(),
|
||||
|
||||
editPost: acc.Update(cpo).Set("body=?,post=?").Where("pid=?").Prepare(),
|
||||
createPost: acc.Insert(cpo).Columns("cid,body,post,createdBy").Fields("?,?,?,?").Prepare(),
|
||||
deletePost: acc.Delete(cpo).Where("pid=?").Prepare(),
|
||||
|
||||
getUsers: acc.Select("conversations_participants").Columns("uid").Where("cid=?").Prepare(),
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
}
|
||||
|
||||
type Conversation struct {
|
||||
ID int
|
||||
Link string
|
||||
CreatedBy int
|
||||
CreatedAt time.Time
|
||||
LastReplyBy int
|
||||
LastReplyAt time.Time
|
||||
}
|
||||
|
||||
func (co *Conversation) Posts(offset, itemsPerPage int) (posts []*ConversationPost, err error) {
|
||||
rows, err := convoStmts.getPosts.Query(co.ID, offset, itemsPerPage)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
for rows.Next() {
|
||||
p := &ConversationPost{CID: co.ID}
|
||||
err := rows.Scan(&p.ID, &p.Body, &p.Post, &p.CreatedBy)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
p, err = ConvoPostProcess.OnLoad(p)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
posts = append(posts, p)
|
||||
}
|
||||
|
||||
return posts, rows.Err()
|
||||
}
|
||||
|
||||
func (co *Conversation) PostsCount() (count int) {
|
||||
return Countf(convoStmts.countPosts, co.ID)
|
||||
}
|
||||
|
||||
func (co *Conversation) Uids() (ids []int, err error) {
|
||||
rows, e := convoStmts.getUsers.Query(co.ID)
|
||||
if e != nil {
|
||||
return nil, e
|
||||
}
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
var id int
|
||||
if e := rows.Scan(&id); e != nil {
|
||||
return nil, e
|
||||
}
|
||||
ids = append(ids, id)
|
||||
}
|
||||
return ids, rows.Err()
|
||||
}
|
||||
|
||||
func (co *Conversation) Has(uid int) (in bool) {
|
||||
return Countf(convoStmts.has, uid, co.ID) > 0
|
||||
}
|
||||
|
||||
func (co *Conversation) Update() error {
|
||||
_, err := convoStmts.edit.Exec(co.CreatedAt, co.LastReplyBy, co.LastReplyAt, co.ID)
|
||||
return err
|
||||
}
|
||||
|
||||
func (co *Conversation) Create() (int, error) {
|
||||
res, err := convoStmts.create.Exec()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
lastID, err := res.LastInsertId()
|
||||
return int(lastID), err
|
||||
}
|
||||
|
||||
func BuildConvoURL(coid int) string {
|
||||
return "/user/convo/" + strconv.Itoa(coid)
|
||||
}
|
||||
|
||||
type ConversationExtra struct {
|
||||
*Conversation
|
||||
Users []*User
|
||||
}
|
||||
|
||||
type ConversationStore interface {
|
||||
Get(id int) (*Conversation, error)
|
||||
GetUser(uid, offset int) (cos []*Conversation, err error)
|
||||
GetUserExtra(uid, offset int) (cos []*ConversationExtra, err error)
|
||||
GetUserCount(uid int) (count int)
|
||||
Delete(id int) error
|
||||
Count() (count int)
|
||||
Create(content string, createdBy int, participants []int) (int, error)
|
||||
}
|
||||
|
||||
type DefaultConversationStore struct {
|
||||
get *sql.Stmt
|
||||
getUser *sql.Stmt
|
||||
getUserCount *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
deletePosts *sql.Stmt
|
||||
deleteParticipants *sql.Stmt
|
||||
create *sql.Stmt
|
||||
addParticipant *sql.Stmt
|
||||
count *sql.Stmt
|
||||
}
|
||||
|
||||
func NewDefaultConversationStore(acc *qgen.Accumulator) (*DefaultConversationStore, error) {
|
||||
co := "conversations"
|
||||
return &DefaultConversationStore{
|
||||
get: acc.Select(co).Columns("createdBy,createdAt,lastReplyBy,lastReplyAt").Where("cid=?").Prepare(),
|
||||
getUser: acc.SimpleInnerJoin("conversations_participants AS cp", "conversations AS c", "cp.cid, c.createdBy, c.createdAt, c.lastReplyBy, c.lastReplyAt", "cp.cid=c.cid", "cp.uid=?", "c.lastReplyAt DESC, c.createdAt DESC, c.cid DESC", "?,?"),
|
||||
getUserCount: acc.Count("conversations_participants").Where("uid=?").Prepare(),
|
||||
delete: acc.Delete(co).Where("cid=?").Prepare(),
|
||||
deletePosts: acc.Delete("conversations_posts").Where("cid=?").Prepare(),
deleteParticipants: acc.Delete("conversations_participants").Where("cid=?").Prepare(),
create: acc.Insert(co).Columns("createdBy,createdAt,lastReplyBy,lastReplyAt").Fields("?,UTC_TIMESTAMP(),?,UTC_TIMESTAMP()").Prepare(),
addParticipant: acc.Insert("conversations_participants").Columns("uid,cid").Fields("?,?").Prepare(),
count: acc.Count(co).Prepare(),
}, acc.FirstError()
}

func (s *DefaultConversationStore) Get(id int) (*Conversation, error) {
co := &Conversation{ID: id}
err := s.get.QueryRow(id).Scan(&co.CreatedBy, &co.CreatedAt, &co.LastReplyBy, &co.LastReplyAt)
co.Link = BuildConvoURL(co.ID)
return co, err
}

func (s *DefaultConversationStore) GetUser(uid, offset int) (cos []*Conversation, err error) {
rows, err := s.getUser.Query(uid, offset, Config.ItemsPerPage)
if err != nil {
return nil, err
}
defer rows.Close()

for rows.Next() {
co := &Conversation{}
err := rows.Scan(&co.ID, &co.CreatedBy, &co.CreatedAt, &co.LastReplyBy, &co.LastReplyAt)
if err != nil {
return nil, err
}
co.Link = BuildConvoURL(co.ID)
cos = append(cos, co)
}
err = rows.Err()
if err != nil {
return nil, err
}
if len(cos) == 0 {
err = sql.ErrNoRows
}
return cos, err
}

func (s *DefaultConversationStore) GetUserExtra(uid, offset int) (cos []*ConversationExtra, err error) {
raw, err := s.GetUser(uid, offset)
if err != nil {
return nil, err
}
//log.Printf("raw: %+v\n", raw)

if len(raw) == 1 {
//log.Print("r0b2")
uids, err := raw[0].Uids()
if err != nil {
return nil, err
}
//log.Println("r1b2")
umap, err := Users.BulkGetMap(uids)
if err != nil {
return nil, err
}
//log.Println("r2b2")
users := make([]*User, len(umap))
var i int
for _, user := range umap {
users[i] = user
i++
}
return []*ConversationExtra{{raw[0], users}}, nil
}
//log.Println("1")

cmap := make(map[int]*ConversationExtra, len(raw))
for _, co := range raw {
cmap[co.ID] = &ConversationExtra{co, nil}
}

// TODO: Use inqbuild for this or a similar function
var q string
idList := make([]interface{}, len(raw))
for i, co := range raw {
if i == 0 {
q = "?"
} else {
q += ",?"
}
idList[i] = strconv.Itoa(co.ID)
}

rows, err := qgen.NewAcc().Select("conversations_participants").Columns("uid,cid").Where("cid IN(" + q + ")").Query(idList...)
if err != nil {
return nil, err
}
defer rows.Close()
//log.Println("2")

idmap := make(map[int][]int) // cid: []uid
puidmap := make(map[int]struct{})
for rows.Next() {
var uid, cid int
err := rows.Scan(&uid, &cid)
if err != nil {
return nil, err
}
idmap[cid] = append(idmap[cid], uid)
puidmap[uid] = struct{}{}
}
if err = rows.Err(); err != nil {
return nil, err
}
//log.Println("3")
//log.Printf("idmap: %+v\n", idmap)
//log.Printf("puidmap: %+v\n",puidmap)

puids := make([]int, len(puidmap))
var i int
for puid, _ := range puidmap {
puids[i] = puid
i++
}
umap, err := Users.BulkGetMap(puids)
if err != nil {
return nil, err
}
//log.Println("4")
//log.Printf("umap: %+v\n", umap)
for cid, uids := range idmap {
co := cmap[cid]
for _, uid := range uids {
co.Users = append(co.Users, umap[uid])
}
//log.Printf("co.Conversation: %+v\n", co.Conversation)
//log.Printf("co.Users: %+v\n", co.Users)
cmap[cid] = co
}
//log.Printf("cmap: %+v\n", cmap)
for _, ra := range raw {
cos = append(cos, cmap[ra.ID])
}
//log.Printf("cos: %+v\n", cos)

return cos, rows.Err()
}

func (s *DefaultConversationStore) GetUserCount(uid int) (count int) {
err := s.getUserCount.QueryRow(uid).Scan(&count)
if err != nil {
LogError(err)
}
return count
}

// TODO: Use a foreign key or transaction
func (s *DefaultConversationStore) Delete(id int) error {
_, err := s.delete.Exec(id)
if err != nil {
return err
}
_, err = s.deletePosts.Exec(id)
if err != nil {
return err
}
_, err = s.deleteParticipants.Exec(id)
return err
}
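// A minimal sketch of the transactional variant suggested by the TODO above: run the
// three deletes atomically so a failure cannot leave orphaned posts or participants.
// The deleteTx helper, the db parameter, and the "conversations" table name are
// assumptions for illustration; only conversations_posts and conversations_participants
// appear in the statements prepared earlier in this hunk.
func (s *DefaultConversationStore) deleteTx(db *sql.DB, id int) error {
	tx, err := db.Begin()
	if err != nil {
		return err
	}
	defer tx.Rollback() // becomes a no-op once Commit succeeds
	for _, q := range []string{
		"DELETE FROM conversations WHERE cid=?",
		"DELETE FROM conversations_posts WHERE cid=?",
		"DELETE FROM conversations_participants WHERE cid=?",
	} {
		if _, err := tx.Exec(q, id); err != nil {
			return err
		}
	}
	return tx.Commit()
}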

func (s *DefaultConversationStore) Create(content string, createdBy int, participants []int) (int, error) {
if len(participants) == 0 {
return 0, errors.New("no participants set")
}
res, err := s.create.Exec(createdBy, createdBy)
if err != nil {
return 0, err
}
lastID, err := res.LastInsertId()
if err != nil {
return 0, err
}

post := &ConversationPost{CID: int(lastID), Body: content, CreatedBy: createdBy}
_, err = post.Create()
if err != nil {
return 0, err
}

for _, p := range participants {
if p == createdBy {
continue
}
_, err := s.addParticipant.Exec(p, lastID)
if err != nil {
return 0, err
}
}
_, err = s.addParticipant.Exec(createdBy, lastID)
if err != nil {
return 0, err
}

return int(lastID), err
}

// Count returns the total number of conversations
func (s *DefaultConversationStore) Count() (count int) {
err := s.count.QueryRow().Scan(&count)
if err != nil {
LogError(err)
}
return count
}
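// A short usage sketch of the store above: create a two-person conversation and read
// it back. The exampleConvoUsage wrapper is illustrative only and assumes a wired-up
// *DefaultConversationStore; it is not part of the diff.
func exampleConvoUsage(convos *DefaultConversationStore) error {
	cid, err := convos.Create("hello there", 1, []int{1, 2})
	if err != nil {
		return err
	}
	co, err := convos.Get(cid)
	if err != nil {
		return err
	}
	_ = co.Link // built by BuildConvoURL in Get
	return nil
}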
@ -1,142 +0,0 @@
package common

import (
"crypto/aes"
"crypto/cipher"
"crypto/rand"
"encoding/hex"
"io"
)

var ConvoPostProcess ConvoPostProcessor = NewDefaultConvoPostProcessor()

type ConvoPostProcessor interface {
OnLoad(co *ConversationPost) (*ConversationPost, error)
OnSave(co *ConversationPost) (*ConversationPost, error)
}

type DefaultConvoPostProcessor struct {
}

func NewDefaultConvoPostProcessor() *DefaultConvoPostProcessor {
return &DefaultConvoPostProcessor{}
}

func (pr *DefaultConvoPostProcessor) OnLoad(co *ConversationPost) (*ConversationPost, error) {
return co, nil
}

func (pr *DefaultConvoPostProcessor) OnSave(co *ConversationPost) (*ConversationPost, error) {
return co, nil
}

type AesConvoPostProcessor struct {
}

func NewAesConvoPostProcessor() *AesConvoPostProcessor {
return &AesConvoPostProcessor{}
}

func (pr *AesConvoPostProcessor) OnLoad(co *ConversationPost) (*ConversationPost, error) {
if co.Post != "aes" {
return co, nil
}
key, _ := hex.DecodeString(Config.ConvoKey)

ciphertext, err := hex.DecodeString(co.Body)
if err != nil {
return nil, err
}

block, err := aes.NewCipher(key)
if err != nil {
return nil, err
}

aesgcm, err := cipher.NewGCM(block)
if err != nil {
return nil, err
}

nonceSize := aesgcm.NonceSize()
if len(ciphertext) < nonceSize {
return nil, err
}

nonce, ciphertext := ciphertext[:nonceSize], ciphertext[nonceSize:]
plaintext, err := aesgcm.Open(nil, nonce, ciphertext, nil)
if err != nil {
return nil, err
}

lco := *co
lco.Body = string(plaintext)
return &lco, nil
}

func (pr *AesConvoPostProcessor) OnSave(co *ConversationPost) (*ConversationPost, error) {
key, _ := hex.DecodeString(Config.ConvoKey)
block, err := aes.NewCipher(key)
if err != nil {
return nil, err
}

nonce := make([]byte, 12)
if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
return nil, err
}

aesgcm, err := cipher.NewGCM(block)
if err != nil {
return nil, err
}
ciphertext := aesgcm.Seal(nil, nonce, []byte(co.Body), nil)

lco := *co
lco.Body = hex.EncodeToString(ciphertext)
lco.Post = "aes"
return &lco, nil
}
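// A self-contained sketch of the AES-GCM round trip the processors above rely on,
// assuming the 12-byte nonce is prepended to the ciphertext, which is what OnLoad
// expects when it slices off aesgcm.NonceSize() bytes. The helper name and the
// hex-string key handling are illustrative, not part of the diff.
func aesRoundTripSketch(hexKey, plaintext string) (string, error) {
	key, err := hex.DecodeString(hexKey)
	if err != nil {
		return "", err
	}
	block, err := aes.NewCipher(key)
	if err != nil {
		return "", err
	}
	aesgcm, err := cipher.NewGCM(block)
	if err != nil {
		return "", err
	}
	nonce := make([]byte, aesgcm.NonceSize())
	if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
		return "", err
	}
	// Seal with the nonce as the dst prefix so decryption can recover it.
	sealed := aesgcm.Seal(nonce, nonce, []byte(plaintext), nil)
	// Decrypt by splitting the nonce back off the front, mirroring OnLoad.
	opened, err := aesgcm.Open(nil, sealed[:aesgcm.NonceSize()], sealed[aesgcm.NonceSize():], nil)
	if err != nil {
		return "", err
	}
	return string(opened), nil
}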

type ConversationPost struct {
ID int
CID int
Body string
Post string // aes, ''
CreatedBy int
}

// TODO: Should we run OnLoad on this? Or maybe add a FetchMeta method to avoid having to decode the message when it's not necessary?
func (co *ConversationPost) Fetch() error {
return convoStmts.fetchPost.QueryRow(co.ID).Scan(&co.CID, &co.Body, &co.Post, &co.CreatedBy)
}

func (co *ConversationPost) Update() error {
lco, err := ConvoPostProcess.OnSave(co)
if err != nil {
return err
}
//GetHookTable().VhookNoRet("convo_post_update", lco)
_, err = convoStmts.editPost.Exec(lco.Body, lco.Post, lco.ID)
return err
}

func (co *ConversationPost) Create() (int, error) {
lco, err := ConvoPostProcess.OnSave(co)
if err != nil {
return 0, err
}
//GetHookTable().VhookNoRet("convo_post_create", lco)
res, err := convoStmts.createPost.Exec(lco.CID, lco.Body, lco.Post, lco.CreatedBy)
if err != nil {
return 0, err
}

lastID, err := res.LastInsertId()
return int(lastID), err
}

func (co *ConversationPost) Delete() error {
_, err := convoStmts.deletePost.Exec(co.ID)
return err
}
@ -2,57 +2,67 @@ package counters

import (
"database/sql"
"sync/atomic"

c "git.tuxpa.in/a/gosora/common"
qgen "git.tuxpa.in/a/gosora/query_gen"
"github.com/pkg/errors"
".."
"../../query_gen/lib"
)

var AgentViewCounter *DefaultAgentViewCounter

type DefaultAgentViewCounter struct {
buckets []int64 //[AgentID]count
insert *sql.Stmt
agentBuckets []*RWMutexCounterBucket //[AgentID]count
insert *sql.Stmt
}

func NewDefaultAgentViewCounter(acc *qgen.Accumulator) (*DefaultAgentViewCounter, error) {
co := &DefaultAgentViewCounter{
buckets: make([]int64, len(agentMapEnum)),
insert: acc.Insert("viewchunks_agents").Columns("count,createdAt,browser").Fields("?,UTC_TIMESTAMP(),?").Prepare(),
func NewDefaultAgentViewCounter() (*DefaultAgentViewCounter, error) {
acc := qgen.NewAcc()
var agentBuckets = make([]*RWMutexCounterBucket, len(agentMapEnum))
for bucketID, _ := range agentBuckets {
agentBuckets[bucketID] = &RWMutexCounterBucket{counter: 0}
}
c.Tasks.FifteenMin.Add(co.Tick)
//c.Tasks.Sec.Add(co.Tick)
c.Tasks.Shutdown.Add(co.Tick)
return co, acc.FirstError()
counter := &DefaultAgentViewCounter{
agentBuckets: agentBuckets,
insert: acc.Insert("viewchunks_agents").Columns("count, createdAt, browser").Fields("?,UTC_TIMESTAMP(),?").Prepare(),
}
common.AddScheduledFifteenMinuteTask(counter.Tick)
//common.AddScheduledSecondTask(counter.Tick)
common.AddShutdownTask(counter.Tick)
return counter, acc.FirstError()
}

func (co *DefaultAgentViewCounter) Tick() error {
for id, _ := range co.buckets {
count := atomic.SwapInt64(&co.buckets[id], 0)
e := co.insertChunk(count, id) // TODO: Bulk insert for speed?
if e != nil {
return errors.Wrap(errors.WithStack(e), "agent counter")
func (counter *DefaultAgentViewCounter) Tick() error {
for agentID, agentBucket := range counter.agentBuckets {
var count int
agentBucket.RLock()
count = agentBucket.counter
agentBucket.counter = 0
agentBucket.RUnlock()

err := counter.insertChunk(count, agentID) // TODO: Bulk insert for speed?
if err != nil {
return err
}
}
return nil
}

func (co *DefaultAgentViewCounter) insertChunk(count int64, agent int) error {
func (counter *DefaultAgentViewCounter) insertChunk(count int, agent int) error {
if count == 0 {
return nil
}
agentName := reverseAgentMapEnum[agent]
c.DebugLogf("Inserting a vchunk with a count of %d for agent %s (%d)", count, agentName, agent)
_, e := co.insert.Exec(count, agentName)
return e
var agentName = reverseAgentMapEnum[agent]
common.DebugLogf("Inserting a viewchunk with a count of %d for agent %s (%d)", count, agentName, agent)
_, err := counter.insert.Exec(count, agentName)
return err
}

func (co *DefaultAgentViewCounter) Bump(agent int) {
func (counter *DefaultAgentViewCounter) Bump(agent int) {
// TODO: Test this check
c.DebugDetail("buckets ", agent, ": ", co.buckets[agent])
if len(co.buckets) <= agent || agent < 0 {
common.DebugDetail("counter.agentBuckets[", agent, "]: ", counter.agentBuckets[agent])
if len(counter.agentBuckets) <= agent || agent < 0 {
return
}
atomic.AddInt64(&co.buckets[agent], 1)
counter.agentBuckets[agent].Lock()
counter.agentBuckets[agent].counter++
counter.agentBuckets[agent].Unlock()
}
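// A sketch of the counting pattern both versions of this file implement: Bump adds to
// an in-memory bucket on the hot path, and Tick drains each bucket back to zero before
// flushing the drained value to the database. The type name and the flush callback are
// placeholders for the prepared insert used above, not part of the diff.
type atomicBucketSketch struct {
	buckets []int64
}

func (s *atomicBucketSketch) Bump(id int) {
	if id < 0 || id >= len(s.buckets) {
		return
	}
	atomic.AddInt64(&s.buckets[id], 1)
}

func (s *atomicBucketSketch) Tick(flush func(id int, count int64) error) error {
	for id := range s.buckets {
		// Swap-and-reset so views bumped during the flush land in the next chunk.
		if count := atomic.SwapInt64(&s.buckets[id], 0); count != 0 {
			if err := flush(id, count); err != nil {
				return err
			}
		}
	}
	return nil
}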
@ -40,13 +40,3 @@ type RWMutexCounterBucket struct {
counter int
sync.RWMutex
}

type MutexCounterBucket struct {
counter int
sync.Mutex
}

type MutexCounter64Bucket struct {
counter int64
sync.Mutex
}
@ -4,9 +4,8 @@ import (
|
||||
"database/sql"
|
||||
"sync"
|
||||
|
||||
c "git.tuxpa.in/a/gosora/common"
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"github.com/pkg/errors"
|
||||
".."
|
||||
"../../query_gen/lib"
|
||||
)
|
||||
|
||||
var ForumViewCounter *DefaultForumViewCounter
|
||||
@ -24,84 +23,95 @@ type DefaultForumViewCounter struct {
|
||||
|
||||
func NewDefaultForumViewCounter() (*DefaultForumViewCounter, error) {
|
||||
acc := qgen.NewAcc()
|
||||
co := &DefaultForumViewCounter{
|
||||
counter := &DefaultForumViewCounter{
|
||||
oddMap: make(map[int]*RWMutexCounterBucket),
|
||||
evenMap: make(map[int]*RWMutexCounterBucket),
|
||||
insert: acc.Insert("viewchunks_forums").Columns("count,createdAt,forum").Fields("?,UTC_TIMESTAMP(),?").Prepare(),
|
||||
insert: acc.Insert("viewchunks_forums").Columns("count, createdAt, forum").Fields("?,UTC_TIMESTAMP(),?").Prepare(),
|
||||
}
|
||||
c.Tasks.FifteenMin.Add(co.Tick) // There could be a lot of routes, so we don't want to be running this every second
|
||||
//c.Tasks.Sec.Add(co.Tick)
|
||||
c.Tasks.Shutdown.Add(co.Tick)
|
||||
return co, acc.FirstError()
|
||||
common.AddScheduledFifteenMinuteTask(counter.Tick) // There could be a lot of routes, so we don't want to be running this every second
|
||||
//common.AddScheduledSecondTask(counter.Tick)
|
||||
common.AddShutdownTask(counter.Tick)
|
||||
return counter, acc.FirstError()
|
||||
}
|
||||
|
||||
func (co *DefaultForumViewCounter) Tick() error {
|
||||
cLoop := func(l *sync.RWMutex, m map[int]*RWMutexCounterBucket) error {
|
||||
l.RLock()
|
||||
for fid, f := range m {
|
||||
l.RUnlock()
|
||||
var count int
|
||||
f.RLock()
|
||||
count = f.counter
|
||||
f.RUnlock()
|
||||
// TODO: Only delete the bucket when it's zero to avoid hitting popular forums?
|
||||
l.Lock()
|
||||
delete(m, fid)
|
||||
l.Unlock()
|
||||
e := co.insertChunk(count, fid)
|
||||
if e != nil {
|
||||
return errors.Wrap(errors.WithStack(e), "forum counter")
|
||||
}
|
||||
l.RLock()
|
||||
func (counter *DefaultForumViewCounter) Tick() error {
|
||||
counter.oddLock.RLock()
|
||||
oddMap := counter.oddMap
|
||||
counter.oddLock.RUnlock()
|
||||
for forumID, forum := range oddMap {
|
||||
var count int
|
||||
forum.RLock()
|
||||
count = forum.counter
|
||||
forum.RUnlock()
|
||||
// TODO: Only delete the bucket when it's zero to avoid hitting popular forums?
|
||||
counter.oddLock.Lock()
|
||||
delete(counter.oddMap, forumID)
|
||||
counter.oddLock.Unlock()
|
||||
err := counter.insertChunk(count, forumID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
l.RUnlock()
|
||||
return nil
|
||||
}
|
||||
e := cLoop(&co.oddLock, co.oddMap)
|
||||
if e != nil {
|
||||
return e
|
||||
|
||||
counter.evenLock.RLock()
|
||||
evenMap := counter.evenMap
|
||||
counter.evenLock.RUnlock()
|
||||
for forumID, forum := range evenMap {
|
||||
var count int
|
||||
forum.RLock()
|
||||
count = forum.counter
|
||||
forum.RUnlock()
|
||||
// TODO: Only delete the bucket when it's zero to avoid hitting popular forums?
|
||||
counter.evenLock.Lock()
|
||||
delete(counter.evenMap, forumID)
|
||||
counter.evenLock.Unlock()
|
||||
err := counter.insertChunk(count, forumID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return cLoop(&co.evenLock, co.evenMap)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (co *DefaultForumViewCounter) insertChunk(count, forum int) error {
|
||||
func (counter *DefaultForumViewCounter) insertChunk(count int, forum int) error {
|
||||
if count == 0 {
|
||||
return nil
|
||||
}
|
||||
c.DebugLogf("Inserting a vchunk with a count of %d for forum %d", count, forum)
|
||||
_, e := co.insert.Exec(count, forum)
|
||||
return e
|
||||
common.DebugLogf("Inserting a viewchunk with a count of %d for forum %d", count, forum)
|
||||
_, err := counter.insert.Exec(count, forum)
|
||||
return err
|
||||
}
|
||||
|
||||
func (co *DefaultForumViewCounter) Bump(fid int) {
|
||||
func (counter *DefaultForumViewCounter) Bump(forumID int) {
|
||||
// Is the ID even?
|
||||
if fid%2 == 0 {
|
||||
co.evenLock.RLock()
|
||||
f, ok := co.evenMap[fid]
|
||||
co.evenLock.RUnlock()
|
||||
if forumID%2 == 0 {
|
||||
counter.evenLock.RLock()
|
||||
forum, ok := counter.evenMap[forumID]
|
||||
counter.evenLock.RUnlock()
|
||||
if ok {
|
||||
f.Lock()
|
||||
f.counter++
|
||||
f.Unlock()
|
||||
forum.Lock()
|
||||
forum.counter++
|
||||
forum.Unlock()
|
||||
} else {
|
||||
co.evenLock.Lock()
|
||||
co.evenMap[fid] = &RWMutexCounterBucket{counter: 1}
|
||||
co.evenLock.Unlock()
|
||||
counter.evenLock.Lock()
|
||||
counter.evenMap[forumID] = &RWMutexCounterBucket{counter: 1}
|
||||
counter.evenLock.Unlock()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
co.oddLock.RLock()
|
||||
f, ok := co.oddMap[fid]
|
||||
co.oddLock.RUnlock()
|
||||
counter.oddLock.RLock()
|
||||
forum, ok := counter.oddMap[forumID]
|
||||
counter.oddLock.RUnlock()
|
||||
if ok {
|
||||
f.Lock()
|
||||
f.counter++
|
||||
f.Unlock()
|
||||
forum.Lock()
|
||||
forum.counter++
|
||||
forum.Unlock()
|
||||
} else {
|
||||
co.oddLock.Lock()
|
||||
co.oddMap[fid] = &RWMutexCounterBucket{counter: 1}
|
||||
co.oddLock.Unlock()
|
||||
counter.oddLock.Lock()
|
||||
counter.oddMap[forumID] = &RWMutexCounterBucket{counter: 1}
|
||||
counter.oddLock.Unlock()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,19 +1,14 @@
|
||||
package counters
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"sync/atomic"
|
||||
|
||||
c "git.tuxpa.in/a/gosora/common"
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
import "database/sql"
|
||||
import ".."
|
||||
import "../../query_gen/lib"
|
||||
|
||||
var LangViewCounter *DefaultLangViewCounter
|
||||
|
||||
var langCodes = []string{
|
||||
"unknown",
|
||||
"",
|
||||
"none",
|
||||
"af",
|
||||
"ar",
|
||||
"az",
|
||||
@ -52,7 +47,6 @@ var langCodes = []string{
|
||||
"kn",
|
||||
"ko",
|
||||
"kok",
|
||||
"kw",
|
||||
"ky",
|
||||
"lt",
|
||||
"lv",
|
||||
@ -100,78 +94,74 @@ var langCodes = []string{
|
||||
}
|
||||
|
||||
type DefaultLangViewCounter struct {
|
||||
//buckets []*MutexCounterBucket //[OSID]count
|
||||
buckets []int64 //[OSID]count
|
||||
buckets []*RWMutexCounterBucket //[OSID]count
|
||||
codesToIndices map[string]int
|
||||
|
||||
insert *sql.Stmt
|
||||
insert *sql.Stmt
|
||||
}
|
||||
|
||||
func NewDefaultLangViewCounter(acc *qgen.Accumulator) (*DefaultLangViewCounter, error) {
|
||||
codesToIndices := make(map[string]int, len(langCodes))
|
||||
func NewDefaultLangViewCounter() (*DefaultLangViewCounter, error) {
|
||||
acc := qgen.NewAcc()
|
||||
|
||||
var langBuckets = make([]*RWMutexCounterBucket, len(langCodes))
|
||||
for bucketID, _ := range langBuckets {
|
||||
langBuckets[bucketID] = &RWMutexCounterBucket{counter: 0}
|
||||
}
|
||||
var codesToIndices = make(map[string]int)
|
||||
for index, code := range langCodes {
|
||||
codesToIndices[code] = index
|
||||
}
|
||||
co := &DefaultLangViewCounter{
|
||||
buckets: make([]int64, len(langCodes)),
|
||||
|
||||
counter := &DefaultLangViewCounter{
|
||||
buckets: langBuckets,
|
||||
codesToIndices: codesToIndices,
|
||||
insert: acc.Insert("viewchunks_langs").Columns("count,createdAt,lang").Fields("?,UTC_TIMESTAMP(),?").Prepare(),
|
||||
insert: acc.Insert("viewchunks_langs").Columns("count, createdAt, lang").Fields("?,UTC_TIMESTAMP(),?").Prepare(),
|
||||
}
|
||||
|
||||
c.Tasks.FifteenMin.Add(co.Tick)
|
||||
//c.Tasks.Sec.Add(co.Tick)
|
||||
c.Tasks.Shutdown.Add(co.Tick)
|
||||
return co, acc.FirstError()
|
||||
common.AddScheduledFifteenMinuteTask(counter.Tick)
|
||||
//common.AddScheduledSecondTask(counter.Tick)
|
||||
common.AddShutdownTask(counter.Tick)
|
||||
return counter, acc.FirstError()
|
||||
}
|
||||
|
||||
func (co *DefaultLangViewCounter) Tick() error {
|
||||
for id := 0; id < len(co.buckets); id++ {
|
||||
count := atomic.SwapInt64(&co.buckets[id], 0)
|
||||
e := co.insertChunk(count, id) // TODO: Bulk insert for speed?
|
||||
if e != nil {
|
||||
return errors.Wrap(errors.WithStack(e), "langview counter")
|
||||
func (counter *DefaultLangViewCounter) Tick() error {
|
||||
for id, bucket := range counter.buckets {
|
||||
var count int
|
||||
bucket.RLock()
|
||||
count = bucket.counter
|
||||
bucket.counter = 0 // TODO: Add a SetZero method to reduce the amount of duplicate code between the OS and agent counters?
|
||||
bucket.RUnlock()
|
||||
|
||||
err := counter.insertChunk(count, id) // TODO: Bulk insert for speed?
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (co *DefaultLangViewCounter) insertChunk(count int64, id int) error {
|
||||
func (counter *DefaultLangViewCounter) insertChunk(count int, id int) error {
|
||||
if count == 0 {
|
||||
return nil
|
||||
}
|
||||
langCode := langCodes[id]
|
||||
if langCode == "" {
|
||||
langCode = "none"
|
||||
}
|
||||
c.DebugLogf("Inserting a vchunk with a count of %d for lang %s (%d)", count, langCode, id)
|
||||
_, e := co.insert.Exec(count, langCode)
|
||||
return e
|
||||
var langCode = langCodes[id]
|
||||
common.DebugLogf("Inserting a viewchunk with a count of %d for lang %s (%d)", count, langCode, id)
|
||||
_, err := counter.insert.Exec(count, langCode)
|
||||
return err
|
||||
}
|
||||
|
||||
func (co *DefaultLangViewCounter) Bump(langCode string) (validCode bool) {
|
||||
validCode = true
|
||||
id, ok := co.codesToIndices[langCode]
|
||||
func (counter *DefaultLangViewCounter) Bump(langCode string) {
|
||||
id, ok := counter.codesToIndices[langCode]
|
||||
if !ok {
|
||||
// TODO: Tell the caller that the code's invalid
|
||||
id = 0 // Unknown
|
||||
validCode = false
|
||||
}
|
||||
|
||||
// TODO: Test this check
|
||||
c.DebugDetail("buckets ", id, ": ", co.buckets[id])
|
||||
if len(co.buckets) <= id || id < 0 {
|
||||
return validCode
|
||||
}
|
||||
atomic.AddInt64(&co.buckets[id], 1)
|
||||
|
||||
return validCode
|
||||
}
|
||||
|
||||
func (co *DefaultLangViewCounter) Bump2(id int) {
|
||||
// TODO: Test this check
|
||||
c.DebugDetail("bucket ", id, ": ", co.buckets[id])
|
||||
if len(co.buckets) <= id || id < 0 {
|
||||
common.DebugDetail("counter.buckets[", id, "]: ", counter.buckets[id])
|
||||
if len(counter.buckets) <= id || id < 0 {
|
||||
return
|
||||
}
|
||||
atomic.AddInt64(&co.buckets[id], 1)
|
||||
counter.buckets[id].Lock()
|
||||
counter.buckets[id].counter++
|
||||
counter.buckets[id].Unlock()
|
||||
}
|
||||
|
@ -1,92 +0,0 @@
|
||||
package counters
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"runtime"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
c "git.tuxpa.in/a/gosora/common"
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
var MemoryCounter *DefaultMemoryCounter
|
||||
|
||||
type DefaultMemoryCounter struct {
|
||||
insert *sql.Stmt
|
||||
|
||||
totMem uint64
|
||||
totCount uint64
|
||||
stackMem uint64
|
||||
stackCount uint64
|
||||
heapMem uint64
|
||||
heapCount uint64
|
||||
|
||||
sync.Mutex
|
||||
}
|
||||
|
||||
func NewMemoryCounter(acc *qgen.Accumulator) (*DefaultMemoryCounter, error) {
|
||||
co := &DefaultMemoryCounter{
|
||||
insert: acc.Insert("memchunks").Columns("count,stack,heap,createdAt").Fields("?,?,?,UTC_TIMESTAMP()").Prepare(),
|
||||
}
|
||||
c.Tasks.FifteenMin.Add(co.Tick)
|
||||
//c.Tasks.Sec.Add(co.Tick)
|
||||
c.Tasks.Shutdown.Add(co.Tick)
|
||||
ticker := time.NewTicker(time.Minute)
|
||||
go func() {
|
||||
defer c.EatPanics()
|
||||
for {
|
||||
select {
|
||||
case <-ticker.C:
|
||||
var m runtime.MemStats
|
||||
runtime.ReadMemStats(&m)
|
||||
co.Lock()
|
||||
co.totCount++
|
||||
co.totMem += m.Sys
|
||||
co.stackCount++
|
||||
co.stackMem += m.StackInuse
|
||||
co.heapCount++
|
||||
co.heapMem += m.HeapAlloc
|
||||
co.Unlock()
|
||||
}
|
||||
}
|
||||
}()
|
||||
return co, acc.FirstError()
|
||||
}
|
||||
|
||||
func (co *DefaultMemoryCounter) Tick() (e error) {
|
||||
var m runtime.MemStats
|
||||
runtime.ReadMemStats(&m)
|
||||
var rTotMem, rTotCount, rStackMem, rStackCount, rHeapMem, rHeapCount uint64
|
||||
|
||||
co.Lock()
|
||||
|
||||
rTotMem = co.totMem
|
||||
rTotCount = co.totCount
|
||||
rStackMem = co.stackMem
|
||||
rStackCount = co.stackCount
|
||||
rHeapMem = co.heapMem
|
||||
rHeapCount = co.heapCount
|
||||
|
||||
co.totMem = 0
|
||||
co.totCount = 0
|
||||
co.stackMem = 0
|
||||
co.stackCount = 0
|
||||
co.heapMem = 0
|
||||
co.heapCount = 0
|
||||
|
||||
co.Unlock()
|
||||
|
||||
var avgMem, avgStack, avgHeap uint64
|
||||
avgMem = (rTotMem + m.Sys) / (rTotCount + 1)
|
||||
avgStack = (rStackMem + m.StackInuse) / (rStackCount + 1)
|
||||
avgHeap = (rHeapMem + m.HeapAlloc) / (rHeapCount + 1)
|
||||
|
||||
c.DebugLogf("Inserting a memchunk with a value of %d - %d - %d", avgMem, avgStack, avgHeap)
|
||||
_, e = co.insert.Exec(avgMem, avgStack, avgHeap)
|
||||
if e != nil {
|
||||
return errors.Wrap(errors.WithStack(e), "mem counter")
|
||||
}
|
||||
return nil
|
||||
}
|
@ -1,118 +0,0 @@
|
||||
package counters
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"math"
|
||||
"time"
|
||||
|
||||
c "git.tuxpa.in/a/gosora/common"
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
var PerfCounter *DefaultPerfCounter
|
||||
|
||||
type PerfCounterBucket struct {
|
||||
low *MutexCounter64Bucket
|
||||
high *MutexCounter64Bucket
|
||||
avg *MutexCounter64Bucket
|
||||
}
|
||||
|
||||
// TODO: Track perf on a per route basis
|
||||
type DefaultPerfCounter struct {
|
||||
buckets []*PerfCounterBucket
|
||||
|
||||
insert *sql.Stmt
|
||||
}
|
||||
|
||||
func NewDefaultPerfCounter(acc *qgen.Accumulator) (*DefaultPerfCounter, error) {
|
||||
co := &DefaultPerfCounter{
|
||||
buckets: []*PerfCounterBucket{
|
||||
{
|
||||
low: &MutexCounter64Bucket{counter: math.MaxInt64},
|
||||
high: &MutexCounter64Bucket{counter: 0},
|
||||
avg: &MutexCounter64Bucket{counter: 0},
|
||||
},
|
||||
},
|
||||
insert: acc.Insert("perfchunks").Columns("low,high,avg,createdAt").Fields("?,?,?,UTC_TIMESTAMP()").Prepare(),
|
||||
}
|
||||
|
||||
c.Tasks.FifteenMin.Add(co.Tick)
|
||||
//c.Tasks.Sec.Add(co.Tick)
|
||||
c.Tasks.Shutdown.Add(co.Tick)
|
||||
return co, acc.FirstError()
|
||||
}
|
||||
|
||||
func (co *DefaultPerfCounter) Tick() error {
|
||||
getCounter := func(b *MutexCounter64Bucket) (c int64) {
|
||||
b.Lock()
|
||||
c = b.counter
|
||||
b.counter = 0
|
||||
b.Unlock()
|
||||
return c
|
||||
}
|
||||
var low int64
|
||||
hTbl := c.GetHookTable()
|
||||
for _, b := range co.buckets {
|
||||
b.low.Lock()
|
||||
low, b.low.counter = b.low.counter, math.MaxInt64
|
||||
b.low.Unlock()
|
||||
if low == math.MaxInt64 {
|
||||
low = 0
|
||||
}
|
||||
high := getCounter(b.high)
|
||||
avg := getCounter(b.avg)
|
||||
c.H_counters_perf_tick_row_hook(hTbl, low, high, avg)
|
||||
if e := co.insertChunk(low, high, avg); e != nil { // TODO: Bulk insert for speed?
|
||||
return errors.Wrap(errors.WithStack(e), "perf counter")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (co *DefaultPerfCounter) insertChunk(low, high, avg int64) error {
|
||||
if low == 0 && high == 0 && avg == 0 {
|
||||
return nil
|
||||
}
|
||||
c.DebugLogf("Inserting a pchunk with low %d, high %d, avg %d", low, high, avg)
|
||||
if c.Dev.LogNewLongRoute && high > (5*1000*1000) {
|
||||
c.Logf("pchunk high %d", high)
|
||||
}
|
||||
_, e := co.insert.Exec(low, high, avg)
|
||||
return e
|
||||
}
|
||||
|
||||
func (co *DefaultPerfCounter) Push(dur time.Duration /*,_ bool*/) {
|
||||
id := 0
|
||||
b := co.buckets[id]
|
||||
//c.DebugDetail("buckets ", id, ": ", b)
|
||||
micro := dur.Microseconds()
|
||||
if micro >= math.MaxInt32 {
|
||||
c.LogWarning(errors.New("dur should not be int32 max or higher"))
|
||||
}
|
||||
|
||||
low := b.low
|
||||
low.Lock()
|
||||
if micro < low.counter {
|
||||
low.counter = micro
|
||||
}
|
||||
low.Unlock()
|
||||
|
||||
high := b.high
|
||||
high.Lock()
|
||||
if micro > high.counter {
|
||||
high.counter = micro
|
||||
}
|
||||
high.Unlock()
|
||||
|
||||
avg := b.avg
|
||||
avg.Lock()
|
||||
if micro != avg.counter {
|
||||
if avg.counter == 0 {
|
||||
avg.counter = micro
|
||||
} else {
|
||||
avg.counter = (micro + avg.counter) / 2
|
||||
}
|
||||
}
|
||||
avg.Unlock()
|
||||
}
|
@ -4,9 +4,8 @@ import (
|
||||
"database/sql"
|
||||
"sync/atomic"
|
||||
|
||||
c "git.tuxpa.in/a/gosora/common"
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"github.com/pkg/errors"
|
||||
".."
|
||||
"../../query_gen/lib"
|
||||
)
|
||||
|
||||
var PostCounter *DefaultPostCounter
|
||||
@ -20,44 +19,40 @@ type DefaultPostCounter struct {
|
||||
|
||||
func NewPostCounter() (*DefaultPostCounter, error) {
|
||||
acc := qgen.NewAcc()
|
||||
co := &DefaultPostCounter{
|
||||
counter := &DefaultPostCounter{
|
||||
currentBucket: 0,
|
||||
insert: acc.Insert("postchunks").Columns("count,createdAt").Fields("?,UTC_TIMESTAMP()").Prepare(),
|
||||
insert: acc.Insert("postchunks").Columns("count, createdAt").Fields("?,UTC_TIMESTAMP()").Prepare(),
|
||||
}
|
||||
c.Tasks.FifteenMin.Add(co.Tick)
|
||||
//c.Tasks.Sec.Add(co.Tick)
|
||||
c.Tasks.Shutdown.Add(co.Tick)
|
||||
return co, acc.FirstError()
|
||||
common.AddScheduledFifteenMinuteTask(counter.Tick)
|
||||
//common.AddScheduledSecondTask(counter.Tick)
|
||||
common.AddShutdownTask(counter.Tick)
|
||||
return counter, acc.FirstError()
|
||||
}
|
||||
|
||||
func (co *DefaultPostCounter) Tick() (err error) {
|
||||
oldBucket := co.currentBucket
|
||||
func (counter *DefaultPostCounter) Tick() (err error) {
|
||||
var oldBucket = counter.currentBucket
|
||||
var nextBucket int64 // 0
|
||||
if co.currentBucket == 0 {
|
||||
if counter.currentBucket == 0 {
|
||||
nextBucket = 1
|
||||
}
|
||||
atomic.AddInt64(&co.buckets[oldBucket], co.buckets[nextBucket])
|
||||
atomic.StoreInt64(&co.buckets[nextBucket], 0)
|
||||
atomic.StoreInt64(&co.currentBucket, nextBucket)
|
||||
atomic.AddInt64(&counter.buckets[oldBucket], counter.buckets[nextBucket])
|
||||
atomic.StoreInt64(&counter.buckets[nextBucket], 0)
|
||||
atomic.StoreInt64(&counter.currentBucket, nextBucket)
|
||||
|
||||
previousViewChunk := co.buckets[oldBucket]
|
||||
atomic.AddInt64(&co.buckets[oldBucket], -previousViewChunk)
|
||||
err = co.insertChunk(previousViewChunk)
|
||||
if err != nil {
|
||||
return errors.Wrap(errors.WithStack(err), "post counter")
|
||||
}
|
||||
return nil
|
||||
var previousViewChunk = counter.buckets[oldBucket]
|
||||
atomic.AddInt64(&counter.buckets[oldBucket], -previousViewChunk)
|
||||
return counter.insertChunk(previousViewChunk)
|
||||
}
|
||||
|
||||
func (co *DefaultPostCounter) Bump() {
|
||||
atomic.AddInt64(&co.buckets[co.currentBucket], 1)
|
||||
func (counter *DefaultPostCounter) Bump() {
|
||||
atomic.AddInt64(&counter.buckets[counter.currentBucket], 1)
|
||||
}
|
||||
|
||||
func (co *DefaultPostCounter) insertChunk(count int64) error {
|
||||
func (counter *DefaultPostCounter) insertChunk(count int64) error {
|
||||
if count == 0 {
|
||||
return nil
|
||||
}
|
||||
c.DebugLogf("Inserting a postchunk with a count of %d", count)
|
||||
_, err := co.insert.Exec(count)
|
||||
common.DebugLogf("Inserting a postchunk with a count of %d", count)
|
||||
_, err := counter.insert.Exec(count)
|
||||
return err
|
||||
}
|
||||
|
@ -5,9 +5,8 @@ import (
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
|
||||
c "git.tuxpa.in/a/gosora/common"
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"github.com/pkg/errors"
|
||||
".."
|
||||
"../../query_gen/lib"
|
||||
)
|
||||
|
||||
var ReferrerTracker *DefaultReferrerTracker
|
||||
@ -35,11 +34,11 @@ func NewDefaultReferrerTracker() (*DefaultReferrerTracker, error) {
|
||||
refTracker := &DefaultReferrerTracker{
|
||||
odd: make(map[string]*ReferrerItem),
|
||||
even: make(map[string]*ReferrerItem),
|
||||
insert: acc.Insert("viewchunks_referrers").Columns("count,createdAt,domain").Fields("?,UTC_TIMESTAMP(),?").Prepare(), // TODO: Do something more efficient than doing a query for each referrer
|
||||
insert: acc.Insert("viewchunks_referrers").Columns("count, createdAt, domain").Fields("?,UTC_TIMESTAMP(),?").Prepare(), // TODO: Do something more efficient than doing a query for each referrer
|
||||
}
|
||||
c.Tasks.FifteenMin.Add(refTracker.Tick)
|
||||
//c.Tasks.Sec.Add(refTracker.Tick)
|
||||
c.Tasks.Shutdown.Add(refTracker.Tick)
|
||||
common.AddScheduledFifteenMinuteTask(refTracker.Tick)
|
||||
//common.AddScheduledSecondTask(refTracker.Tick)
|
||||
common.AddShutdownTask(refTracker.Tick)
|
||||
return refTracker, acc.FirstError()
|
||||
}
|
||||
|
||||
@ -51,41 +50,50 @@ func (ref *DefaultReferrerTracker) Tick() (err error) {
|
||||
if count != 0 {
|
||||
err := ref.insertChunk(referrer, count) // TODO: Bulk insert for speed?
|
||||
if err != nil {
|
||||
return errors.Wrap(errors.WithStack(err), "ref counter")
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
delete(referrersToDelete, referrer)
|
||||
}
|
||||
|
||||
// Run the queries and schedule zero view refs for deletion from memory
|
||||
refLoop := func(l *sync.RWMutex, m map[string]*ReferrerItem) error {
|
||||
l.Lock()
|
||||
defer l.Unlock()
|
||||
for referrer, counter := range m {
|
||||
if counter.Count == 0 {
|
||||
referrersToDelete[referrer] = counter
|
||||
delete(m, referrer)
|
||||
}
|
||||
count := atomic.SwapInt64(&counter.Count, 0)
|
||||
err := ref.insertChunk(referrer, count) // TODO: Bulk insert for speed?
|
||||
if err != nil {
|
||||
return errors.Wrap(errors.WithStack(err), "ref counter")
|
||||
}
|
||||
ref.oddLock.Lock()
|
||||
for referrer, counter := range ref.odd {
|
||||
if counter.Count == 0 {
|
||||
referrersToDelete[referrer] = counter
|
||||
delete(ref.odd, referrer)
|
||||
}
|
||||
count := atomic.SwapInt64(&counter.Count, 0)
|
||||
err := ref.insertChunk(referrer, count) // TODO: Bulk insert for speed?
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
err = refLoop(&ref.oddLock, ref.odd)
|
||||
if err != nil {
|
||||
return err
|
||||
ref.oddLock.Unlock()
|
||||
|
||||
ref.evenLock.Lock()
|
||||
for referrer, counter := range ref.even {
|
||||
if counter.Count == 0 {
|
||||
referrersToDelete[referrer] = counter
|
||||
delete(ref.even, referrer)
|
||||
}
|
||||
count := atomic.SwapInt64(&counter.Count, 0)
|
||||
err := ref.insertChunk(referrer, count) // TODO: Bulk insert for speed?
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return refLoop(&ref.evenLock, ref.even)
|
||||
ref.evenLock.Unlock()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ref *DefaultReferrerTracker) insertChunk(referrer string, count int64) error {
|
||||
if count == 0 {
|
||||
return nil
|
||||
}
|
||||
c.DebugDetailf("Inserting a vchunk with a count of %d for ref %s", count, referrer)
|
||||
common.DebugDetailf("Inserting a viewchunk with a count of %d for referrer %s", count, referrer)
|
||||
_, err := ref.insert.Exec(count, referrer)
|
||||
return err
|
||||
}
|
||||
|
@ -4,9 +4,8 @@ import (
|
||||
"database/sql"
|
||||
"sync/atomic"
|
||||
|
||||
c "git.tuxpa.in/a/gosora/common"
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"github.com/pkg/errors"
|
||||
".."
|
||||
"../../query_gen/lib"
|
||||
)
|
||||
|
||||
// TODO: Rename this?
|
||||
@ -21,45 +20,40 @@ type DefaultViewCounter struct {
|
||||
}
|
||||
|
||||
func NewGlobalViewCounter(acc *qgen.Accumulator) (*DefaultViewCounter, error) {
|
||||
co := &DefaultViewCounter{
|
||||
counter := &DefaultViewCounter{
|
||||
currentBucket: 0,
|
||||
insert: acc.Insert("viewchunks").Columns("count,createdAt,route").Fields("?,UTC_TIMESTAMP(),''").Prepare(),
|
||||
insert: acc.Insert("viewchunks").Columns("count, createdAt").Fields("?,UTC_TIMESTAMP()").Prepare(),
|
||||
}
|
||||
c.Tasks.FifteenMin.Add(co.Tick) // This is run once every fifteen minutes to match the frequency of the RouteViewCounter
|
||||
//c.Tasks.Sec.Add(co.Tick)
|
||||
c.Tasks.Shutdown.Add(co.Tick)
|
||||
return co, acc.FirstError()
|
||||
common.AddScheduledFifteenMinuteTask(counter.Tick) // This is run once every fifteen minutes to match the frequency of the RouteViewCounter
|
||||
//common.AddScheduledSecondTask(counter.Tick)
|
||||
common.AddShutdownTask(counter.Tick)
|
||||
return counter, acc.FirstError()
|
||||
}
|
||||
|
||||
// TODO: Simplify the atomics used here
|
||||
func (co *DefaultViewCounter) Tick() (err error) {
|
||||
oldBucket := co.currentBucket
|
||||
func (counter *DefaultViewCounter) Tick() (err error) {
|
||||
var oldBucket = counter.currentBucket
|
||||
var nextBucket int64 // 0
|
||||
if co.currentBucket == 0 {
|
||||
if counter.currentBucket == 0 {
|
||||
nextBucket = 1
|
||||
}
|
||||
atomic.AddInt64(&co.buckets[oldBucket], co.buckets[nextBucket])
|
||||
atomic.StoreInt64(&co.buckets[nextBucket], 0)
|
||||
atomic.StoreInt64(&co.currentBucket, nextBucket)
|
||||
atomic.AddInt64(&counter.buckets[oldBucket], counter.buckets[nextBucket])
|
||||
atomic.StoreInt64(&counter.buckets[nextBucket], 0)
|
||||
atomic.StoreInt64(&counter.currentBucket, nextBucket)
|
||||
|
||||
previousViewChunk := co.buckets[oldBucket]
|
||||
atomic.AddInt64(&co.buckets[oldBucket], -previousViewChunk)
|
||||
err = co.insertChunk(previousViewChunk)
|
||||
if err != nil {
|
||||
return errors.Wrap(errors.WithStack(err), "req counter")
|
||||
}
|
||||
return nil
|
||||
var previousViewChunk = counter.buckets[oldBucket]
|
||||
atomic.AddInt64(&counter.buckets[oldBucket], -previousViewChunk)
|
||||
return counter.insertChunk(previousViewChunk)
|
||||
}
|
||||
|
||||
func (co *DefaultViewCounter) Bump() {
|
||||
atomic.AddInt64(&co.buckets[co.currentBucket], 1)
|
||||
func (counter *DefaultViewCounter) Bump() {
|
||||
atomic.AddInt64(&counter.buckets[counter.currentBucket], 1)
|
||||
}
|
||||
|
||||
func (co *DefaultViewCounter) insertChunk(count int64) error {
|
||||
func (counter *DefaultViewCounter) insertChunk(count int64) error {
|
||||
if count == 0 {
|
||||
return nil
|
||||
}
|
||||
c.DebugLogf("Inserting a vchunk with a count of %d", count)
|
||||
_, err := co.insert.Exec(count)
|
||||
common.DebugLogf("Inserting a viewchunk with a count of %d", count)
|
||||
_, err := counter.insert.Exec(count)
|
||||
return err
|
||||
}
|
||||
|
@ -1,187 +1,66 @@
|
||||
package counters
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
c "git.tuxpa.in/a/gosora/common"
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"git.tuxpa.in/a/gosora/uutils"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
import "database/sql"
|
||||
import ".."
|
||||
import "../../query_gen/lib"
|
||||
|
||||
var RouteViewCounter *DefaultRouteViewCounter
|
||||
|
||||
type RVBucket struct {
|
||||
counter int64
|
||||
avg int
|
||||
|
||||
sync.Mutex
|
||||
}
|
||||
|
||||
// TODO: Make this lockless?
|
||||
type DefaultRouteViewCounter struct {
|
||||
buckets []*RVBucket //[RouteID]count
|
||||
buckets []*RWMutexCounterBucket //[RouteID]count
|
||||
insert *sql.Stmt
|
||||
insert5 *sql.Stmt
|
||||
}
|
||||
|
||||
func NewDefaultRouteViewCounter(acc *qgen.Accumulator) (*DefaultRouteViewCounter, error) {
|
||||
routeBuckets := make([]*RVBucket, len(routeMapEnum))
|
||||
func NewDefaultRouteViewCounter() (*DefaultRouteViewCounter, error) {
|
||||
acc := qgen.NewAcc()
|
||||
var routeBuckets = make([]*RWMutexCounterBucket, len(routeMapEnum))
|
||||
for bucketID, _ := range routeBuckets {
|
||||
routeBuckets[bucketID] = &RVBucket{counter: 0, avg: 0}
|
||||
routeBuckets[bucketID] = &RWMutexCounterBucket{counter: 0}
|
||||
}
|
||||
|
||||
fields := "?,?,UTC_TIMESTAMP(),?"
|
||||
co := &DefaultRouteViewCounter{
|
||||
counter := &DefaultRouteViewCounter{
|
||||
buckets: routeBuckets,
|
||||
insert: acc.Insert("viewchunks").Columns("count,avg,createdAt,route").Fields(fields).Prepare(),
|
||||
insert5: acc.BulkInsert("viewchunks").Columns("count,avg,createdAt,route").Fields(fields, fields, fields, fields, fields).Prepare(),
|
||||
insert: acc.Insert("viewchunks").Columns("count, createdAt, route").Fields("?,UTC_TIMESTAMP(),?").Prepare(),
|
||||
}
|
||||
if !c.Config.DisableAnalytics {
|
||||
c.Tasks.FifteenMin.Add(co.Tick) // There could be a lot of routes, so we don't want to be running this every second
|
||||
//c.Tasks.Sec.Add(co.Tick)
|
||||
c.Tasks.Shutdown.Add(co.Tick)
|
||||
}
|
||||
return co, acc.FirstError()
|
||||
common.AddScheduledFifteenMinuteTask(counter.Tick) // There could be a lot of routes, so we don't want to be running this every second
|
||||
//common.AddScheduledSecondTask(counter.Tick)
|
||||
common.AddShutdownTask(counter.Tick)
|
||||
return counter, acc.FirstError()
|
||||
}
|
||||
|
||||
type RVCount struct {
|
||||
RouteID int
|
||||
Count int64
|
||||
Avg int
|
||||
}
|
||||
func (counter *DefaultRouteViewCounter) Tick() error {
|
||||
for routeID, routeBucket := range counter.buckets {
|
||||
var count int
|
||||
routeBucket.RLock()
|
||||
count = routeBucket.counter
|
||||
routeBucket.counter = 0
|
||||
routeBucket.RUnlock()
|
||||
|
||||
func (co *DefaultRouteViewCounter) Tick() (err error) {
|
||||
var tb []RVCount
|
||||
for routeID, b := range co.buckets {
|
||||
var avg int
|
||||
count := atomic.SwapInt64(&b.counter, 0)
|
||||
b.Lock()
|
||||
avg = b.avg
|
||||
b.avg = 0
|
||||
b.Unlock()
|
||||
|
||||
if count == 0 {
|
||||
continue
|
||||
}
|
||||
tb = append(tb, RVCount{routeID, count, avg})
|
||||
}
|
||||
|
||||
// TODO: Expand on this?
|
||||
var i int
|
||||
if len(tb) >= 5 {
|
||||
for ; len(tb) > (i + 5); i += 5 {
|
||||
err := co.insert5Chunk(tb[i : i+5])
|
||||
if err != nil {
|
||||
c.DebugLogf("tb: %+v\n", tb)
|
||||
c.DebugLog("i: ", i)
|
||||
return errors.Wrap(errors.WithStack(err), "route counter x 5")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for ; len(tb) > i; i++ {
|
||||
it := tb[i]
|
||||
err = co.insertChunk(it.Count, it.Avg, it.RouteID)
|
||||
err := counter.insertChunk(count, routeID) // TODO: Bulk insert for speed?
|
||||
if err != nil {
|
||||
c.DebugLogf("tb: %+v\n", tb)
|
||||
c.DebugLog("i: ", i)
|
||||
return errors.Wrap(errors.WithStack(err), "route counter")
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (co *DefaultRouteViewCounter) insertChunk(count int64, avg, route int) error {
|
||||
routeName := reverseRouteMapEnum[route]
|
||||
c.DebugLogf("Inserting vchunk with count %d, avg %d for route %s (%d)", count, avg, routeName, route)
|
||||
_, err := co.insert.Exec(count, avg, routeName)
|
||||
func (counter *DefaultRouteViewCounter) insertChunk(count int, route int) error {
|
||||
if count == 0 {
|
||||
return nil
|
||||
}
|
||||
var routeName = reverseRouteMapEnum[route]
|
||||
common.DebugLogf("Inserting a viewchunk with a count of %d for route %s (%d)", count, routeName, route)
|
||||
_, err := counter.insert.Exec(count, routeName)
|
||||
return err
|
||||
}
|
||||
|
||||
func (co *DefaultRouteViewCounter) insert5Chunk(rvs []RVCount) error {
|
||||
args := make([]interface{}, len(rvs)*3)
|
||||
i := 0
|
||||
for _, rv := range rvs {
|
||||
routeName := reverseRouteMapEnum[rv.RouteID]
|
||||
if rv.Avg == 0 {
|
||||
c.DebugLogf("Queueing vchunk with count %d for routes %s (%d)", rv.Count, routeName, rv.RouteID)
|
||||
} else {
|
||||
c.DebugLogf("Queueing vchunk with count %d, avg %d for routes %s (%d)", rv.Count, rv.Avg, routeName, rv.RouteID)
|
||||
}
|
||||
args[i] = rv.Count
|
||||
args[i+1] = rv.Avg
|
||||
args[i+2] = routeName
|
||||
i += 3
|
||||
}
|
||||
c.DebugDetailf("args: %+v\n", args)
|
||||
_, err := co.insert5.Exec(args...)
|
||||
return err
|
||||
}
|
||||
|
||||
func (co *DefaultRouteViewCounter) Bump(route int) {
|
||||
if c.Config.DisableAnalytics {
|
||||
return
|
||||
}
|
||||
func (counter *DefaultRouteViewCounter) Bump(route int) {
|
||||
// TODO: Test this check
|
||||
b := co.buckets[route]
|
||||
c.DebugDetail("bucket ", route, ": ", b)
|
||||
if len(co.buckets) <= route || route < 0 {
|
||||
common.DebugDetail("counter.buckets[", route, "]: ", counter.buckets[route])
|
||||
if len(counter.buckets) <= route || route < 0 {
|
||||
return
|
||||
}
|
||||
atomic.AddInt64(&b.counter, 1)
|
||||
}
|
||||
|
||||
// TODO: Eliminate the lock?
|
||||
func (co *DefaultRouteViewCounter) Bump2(route int, t time.Time) {
|
||||
if c.Config.DisableAnalytics {
|
||||
return
|
||||
}
|
||||
// TODO: Test this check
|
||||
b := co.buckets[route]
|
||||
c.DebugDetail("bucket ", route, ": ", b)
|
||||
if len(co.buckets) <= route || route < 0 {
|
||||
return
|
||||
}
|
||||
micro := int(time.Since(t).Microseconds())
|
||||
//co.PerfCounter.Push(since, true)
|
||||
atomic.AddInt64(&b.counter, 1)
|
||||
b.Lock()
|
||||
if micro != b.avg {
|
||||
if b.avg == 0 {
|
||||
b.avg = micro
|
||||
} else {
|
||||
b.avg = (micro + b.avg) / 2
|
||||
}
|
||||
}
|
||||
b.Unlock()
|
||||
}
|
||||
|
||||
// TODO: Eliminate the lock?
|
||||
func (co *DefaultRouteViewCounter) Bump3(route int, nano int64) {
|
||||
if c.Config.DisableAnalytics {
|
||||
return
|
||||
}
|
||||
// TODO: Test this check
|
||||
b := co.buckets[route]
|
||||
c.DebugDetail("bucket ", route, ": ", b)
|
||||
if len(co.buckets) <= route || route < 0 {
|
||||
return
|
||||
}
|
||||
micro := int((uutils.Nanotime() - nano) / 1000)
|
||||
//co.PerfCounter.Push(since, true)
|
||||
atomic.AddInt64(&b.counter, 1)
|
||||
b.Lock()
|
||||
if micro != b.avg {
|
||||
if b.avg == 0 {
|
||||
b.avg = micro
|
||||
} else {
|
||||
b.avg = (micro + b.avg) / 2
|
||||
}
|
||||
}
|
||||
b.Unlock()
|
||||
counter.buckets[route].Lock()
|
||||
counter.buckets[route].counter++
|
||||
counter.buckets[route].Unlock()
|
||||
}
|
||||
|
@ -1,57 +1,65 @@
|
||||
package counters
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"sync/atomic"
|
||||
|
||||
c "git.tuxpa.in/a/gosora/common"
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
import "database/sql"
|
||||
import ".."
|
||||
import "../../query_gen/lib"
|
||||
|
||||
var OSViewCounter *DefaultOSViewCounter
|
||||
|
||||
type DefaultOSViewCounter struct {
|
||||
buckets []int64 //[OSID]count
|
||||
buckets []*RWMutexCounterBucket //[OSID]count
|
||||
insert *sql.Stmt
|
||||
}
|
||||
|
||||
func NewDefaultOSViewCounter(acc *qgen.Accumulator) (*DefaultOSViewCounter, error) {
|
||||
co := &DefaultOSViewCounter{
|
||||
buckets: make([]int64, len(osMapEnum)),
|
||||
insert: acc.Insert("viewchunks_systems").Columns("count,createdAt,system").Fields("?,UTC_TIMESTAMP(),?").Prepare(),
|
||||
func NewDefaultOSViewCounter() (*DefaultOSViewCounter, error) {
|
||||
acc := qgen.NewAcc()
|
||||
var osBuckets = make([]*RWMutexCounterBucket, len(osMapEnum))
|
||||
for bucketID, _ := range osBuckets {
|
||||
osBuckets[bucketID] = &RWMutexCounterBucket{counter: 0}
|
||||
}
|
||||
c.Tasks.FifteenMin.Add(co.Tick)
|
||||
//c.Tasks.Sec.Add(co.Tick)
|
||||
c.Tasks.Shutdown.Add(co.Tick)
|
||||
return co, acc.FirstError()
|
||||
counter := &DefaultOSViewCounter{
|
||||
buckets: osBuckets,
|
||||
insert: acc.Insert("viewchunks_systems").Columns("count, createdAt, system").Fields("?,UTC_TIMESTAMP(),?").Prepare(),
|
||||
}
|
||||
common.AddScheduledFifteenMinuteTask(counter.Tick)
|
||||
//common.AddScheduledSecondTask(counter.Tick)
|
||||
common.AddShutdownTask(counter.Tick)
|
||||
return counter, acc.FirstError()
|
||||
}
|
||||
|
||||
func (co *DefaultOSViewCounter) Tick() error {
|
||||
for id, _ := range co.buckets {
|
||||
count := atomic.SwapInt64(&co.buckets[id], 0)
|
||||
if e := co.insertChunk(count, id); e != nil { // TODO: Bulk insert for speed?
|
||||
return errors.Wrap(errors.WithStack(e), "system counter")
|
||||
func (counter *DefaultOSViewCounter) Tick() error {
|
||||
for id, bucket := range counter.buckets {
|
||||
var count int
|
||||
bucket.RLock()
|
||||
count = bucket.counter
|
||||
bucket.counter = 0 // TODO: Add a SetZero method to reduce the amount of duplicate code between the OS and agent counters?
|
||||
bucket.RUnlock()
|
||||
|
||||
err := counter.insertChunk(count, id) // TODO: Bulk insert for speed?
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (co *DefaultOSViewCounter) insertChunk(count int64, os int) error {
|
||||
func (counter *DefaultOSViewCounter) insertChunk(count int, os int) error {
|
||||
if count == 0 {
|
||||
return nil
|
||||
}
|
||||
osName := reverseOSMapEnum[os]
|
||||
c.DebugLogf("Inserting a vchunk with a count of %d for OS %s (%d)", count, osName, os)
|
||||
_, err := co.insert.Exec(count, osName)
|
||||
var osName = reverseOSMapEnum[os]
|
||||
common.DebugLogf("Inserting a viewchunk with a count of %d for OS %s (%d)", count, osName, os)
|
||||
_, err := counter.insert.Exec(count, osName)
|
||||
return err
|
||||
}
|
||||
|
||||
func (co *DefaultOSViewCounter) Bump(id int) {
|
||||
func (counter *DefaultOSViewCounter) Bump(id int) {
|
||||
// TODO: Test this check
|
||||
c.DebugDetail("bucket ", id, ": ", co.buckets[id])
|
||||
if len(co.buckets) <= id || id < 0 {
|
||||
common.DebugDetail("counter.buckets[", id, "]: ", counter.buckets[id])
|
||||
if len(counter.buckets) <= id || id < 0 {
|
||||
return
|
||||
}
|
||||
atomic.AddInt64(&co.buckets[id], 1)
|
||||
counter.buckets[id].Lock()
|
||||
counter.buckets[id].counter++
|
||||
counter.buckets[id].Unlock()
|
||||
}
|
||||
|
@ -4,9 +4,8 @@ import (
|
||||
"database/sql"
|
||||
"sync/atomic"
|
||||
|
||||
c "git.tuxpa.in/a/gosora/common"
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"github.com/pkg/errors"
|
||||
".."
|
||||
"../../query_gen/lib"
|
||||
)
|
||||
|
||||
var TopicCounter *DefaultTopicCounter
|
||||
@ -20,44 +19,40 @@ type DefaultTopicCounter struct {
|
||||
|
||||
func NewTopicCounter() (*DefaultTopicCounter, error) {
|
||||
acc := qgen.NewAcc()
|
||||
co := &DefaultTopicCounter{
|
||||
counter := &DefaultTopicCounter{
|
||||
currentBucket: 0,
|
||||
insert: acc.Insert("topicchunks").Columns("count,createdAt").Fields("?,UTC_TIMESTAMP()").Prepare(),
|
||||
insert: acc.Insert("topicchunks").Columns("count, createdAt").Fields("?,UTC_TIMESTAMP()").Prepare(),
|
||||
}
|
||||
c.Tasks.FifteenMin.Add(co.Tick)
|
||||
//c.Tasks.Sec.Add(co.Tick)
|
||||
c.Tasks.Shutdown.Add(co.Tick)
|
||||
return co, acc.FirstError()
|
||||
common.AddScheduledFifteenMinuteTask(counter.Tick)
|
||||
//common.AddScheduledSecondTask(counter.Tick)
|
||||
common.AddShutdownTask(counter.Tick)
|
||||
return counter, acc.FirstError()
|
||||
}
|
||||
|
||||
func (co *DefaultTopicCounter) Tick() (e error) {
|
||||
oldBucket := co.currentBucket
|
||||
func (counter *DefaultTopicCounter) Tick() (err error) {
|
||||
var oldBucket = counter.currentBucket
|
||||
var nextBucket int64 // 0
|
||||
if co.currentBucket == 0 {
|
||||
if counter.currentBucket == 0 {
|
||||
nextBucket = 1
|
||||
}
|
||||
atomic.AddInt64(&co.buckets[oldBucket], co.buckets[nextBucket])
|
||||
atomic.StoreInt64(&co.buckets[nextBucket], 0)
|
||||
atomic.StoreInt64(&co.currentBucket, nextBucket)
|
||||
atomic.AddInt64(&counter.buckets[oldBucket], counter.buckets[nextBucket])
|
||||
atomic.StoreInt64(&counter.buckets[nextBucket], 0)
|
||||
atomic.StoreInt64(&counter.currentBucket, nextBucket)
|
||||
|
||||
previousViewChunk := co.buckets[oldBucket]
|
||||
atomic.AddInt64(&co.buckets[oldBucket], -previousViewChunk)
|
||||
e = co.insertChunk(previousViewChunk)
|
||||
if e != nil {
|
||||
return errors.Wrap(errors.WithStack(e), "topics counter")
|
||||
}
|
||||
return nil
|
||||
var previousViewChunk = counter.buckets[oldBucket]
|
||||
atomic.AddInt64(&counter.buckets[oldBucket], -previousViewChunk)
|
||||
return counter.insertChunk(previousViewChunk)
|
||||
}
|
||||
|
||||
func (co *DefaultTopicCounter) Bump() {
|
||||
atomic.AddInt64(&co.buckets[co.currentBucket], 1)
|
||||
func (counter *DefaultTopicCounter) Bump() {
|
||||
atomic.AddInt64(&counter.buckets[counter.currentBucket], 1)
|
||||
}
|
||||
|
||||
func (co *DefaultTopicCounter) insertChunk(count int64) error {
|
||||
func (counter *DefaultTopicCounter) insertChunk(count int64) error {
|
||||
if count == 0 {
|
||||
return nil
|
||||
}
|
||||
c.DebugLogf("Inserting a topicchunk with a count of %d", count)
|
||||
_, e := co.insert.Exec(count)
|
||||
return e
|
||||
common.DebugLogf("Inserting a topicchunk with a count of %d", count)
|
||||
_, err := counter.insert.Exec(count)
|
||||
return err
|
||||
}
|
||||
|
@ -2,15 +2,11 @@ package counters
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
c "git.tuxpa.in/a/gosora/common"
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"github.com/pkg/errors"
|
||||
".."
|
||||
"../../query_gen/lib"
|
||||
)
|
||||
|
||||
var TopicViewCounter *DefaultTopicViewCounter
|
||||
@ -22,237 +18,117 @@ type DefaultTopicViewCounter struct {
|
||||
oddLock sync.RWMutex
|
||||
evenLock sync.RWMutex
|
||||
|
||||
weekState byte
|
||||
|
||||
update *sql.Stmt
|
||||
resetOdd *sql.Stmt
|
||||
resetEven *sql.Stmt
|
||||
resetBoth *sql.Stmt
|
||||
|
||||
insertListBuf []TopicViewInsert
|
||||
saveTick *SavedTick
|
||||
update *sql.Stmt
|
||||
}
|
||||
|
||||
func NewDefaultTopicViewCounter() (*DefaultTopicViewCounter, error) {
|
||||
acc := qgen.NewAcc()
|
||||
t := "topics"
|
||||
co := &DefaultTopicViewCounter{
|
||||
counter := &DefaultTopicViewCounter{
|
||||
oddTopics: make(map[int]*RWMutexCounterBucket),
|
||||
evenTopics: make(map[int]*RWMutexCounterBucket),
|
||||
|
||||
//update: acc.Update(t).Set("views=views+?").Where("tid=?").Prepare(),
|
||||
update: acc.Update(t).Set("views=views+?,weekEvenViews=weekEvenViews+?,weekOddViews=weekOddViews+?").Where("tid=?").Prepare(),
|
||||
resetOdd: acc.Update(t).Set("weekOddViews=0").Prepare(),
|
||||
resetEven: acc.Update(t).Set("weekEvenViews=0").Prepare(),
|
||||
resetBoth: acc.Update(t).Set("weekOddViews=0,weekEvenViews=0").Prepare(),
|
||||
|
||||
//insertListBuf: make([]TopicViewInsert, 1024),
|
||||
update: acc.Update("topics").Set("views = views + ?").Where("tid = ?").Prepare(),
|
||||
}
|
||||
e := co.WeekResetInit()
|
||||
if e != nil {
|
||||
return co, e
|
||||
}
|
||||
|
||||
tick := func(f func() error) {
|
||||
c.Tasks.FifteenMin.Add(f) // Who knows how many topics we have queued up, we probably don't want this running too frequently
|
||||
//c.Tasks.Sec.Add(f)
|
||||
c.Tasks.Shutdown.Add(f)
|
||||
}
|
||||
tick(co.Tick)
|
||||
tick(co.WeekResetTick)
|
||||
|
||||
return co, acc.FirstError()
|
||||
common.AddScheduledFifteenMinuteTask(counter.Tick) // Who knows how many topics we have queued up, we probably don't want this running too frequently
|
||||
//common.AddScheduledSecondTask(counter.Tick)
|
||||
common.AddShutdownTask(counter.Tick)
|
||||
return counter, acc.FirstError()
|
||||
}
|
||||
|
||||
type TopicViewInsert struct {
|
||||
Count int
|
||||
TopicID int
|
||||
}
|
||||
func (counter *DefaultTopicViewCounter) Tick() error {
|
||||
// TODO: Fold multiple 1 view topics into one query
|
||||
|
||||
type SavedTick struct {
|
||||
I int
|
||||
I2 int
|
||||
}
|
||||
|
||||
func (co *DefaultTopicViewCounter) handleInsertListBuf(i, i2 int) error {
|
||||
ilb := co.insertListBuf
|
||||
var lastSuccess int
|
||||
for i3 := i2; i3 < i; i3++ {
|
||||
iitem := ilb[i3]
|
||||
if e := co.insertChunk(iitem.Count, iitem.TopicID); e != nil {
|
||||
co.saveTick = &SavedTick{I: i, I2: lastSuccess + 1}
|
||||
for i3 := i2; i3 < i && i3 <= lastSuccess; i3++ {
|
||||
ilb[i3].Count, ilb[i3].TopicID = 0, 0
|
||||
}
|
||||
return errors.Wrap(errors.WithStack(e), "topicview counter")
|
||||
counter.oddLock.RLock()
|
||||
oddTopics := counter.oddTopics
|
||||
counter.oddLock.RUnlock()
|
||||
for topicID, topic := range oddTopics {
|
||||
var count int
|
||||
topic.RLock()
|
||||
count = topic.counter
|
||||
topic.RUnlock()
|
||||
// TODO: Only delete the bucket when it's zero to avoid hitting popular topics?
|
||||
counter.oddLock.Lock()
|
||||
delete(counter.oddTopics, topicID)
|
||||
counter.oddLock.Unlock()
|
||||
err := counter.insertChunk(count, topicID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
lastSuccess = i3
|
||||
}
|
||||
for i3 := i2; i3 < i; i3++ {
|
||||
ilb[i3].Count, ilb[i3].TopicID = 0, 0
|
||||
|
||||
counter.evenLock.RLock()
|
||||
evenTopics := counter.evenTopics
|
||||
counter.evenLock.RUnlock()
|
||||
for topicID, topic := range evenTopics {
|
||||
var count int
|
||||
topic.RLock()
|
||||
count = topic.counter
|
||||
topic.RUnlock()
|
||||
// TODO: Only delete the bucket when it's zero to avoid hitting popular topics?
|
||||
counter.evenLock.Lock()
|
||||
delete(counter.evenTopics, topicID)
|
||||
counter.evenLock.Unlock()
|
||||
err := counter.insertChunk(count, topicID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (co *DefaultTopicViewCounter) Tick() error {
|
||||
// TODO: Fold multiple 1 view topics into one query
|
||||
|
||||
/*if co.saveTick != nil {
|
||||
e := co.handleInsertListBuf(co.saveTick.I, co.saveTick.I2)
|
||||
if e != nil {
|
||||
return e
|
||||
}
|
||||
co.saveTick = nil
|
||||
}*/
|
||||
|
||||
cLoop := func(l *sync.RWMutex, m map[int]*RWMutexCounterBucket) error {
|
||||
//i := 0
|
||||
l.RLock()
|
||||
for topicID, topic := range m {
|
||||
l.RUnlock()
|
||||
var count int
|
||||
topic.RLock()
|
||||
count = topic.counter
|
||||
topic.RUnlock()
|
||||
// TODO: Only delete the bucket when it's zero to avoid hitting popular topics?
|
||||
l.Lock()
|
||||
delete(m, topicID)
|
||||
l.Unlock()
|
||||
/*if len(co.insertListBuf) >= i {
|
||||
co.insertListBuf[i].Count = count
|
||||
co.insertListBuf[i].TopicID = topicID
|
||||
i++
|
||||
} else if i < 4096 {
|
||||
co.insertListBuf = append(co.insertListBuf, TopicViewInsert{count, topicID})
|
||||
} else */if e := co.insertChunk(count, topicID); e != nil {
|
||||
return errors.Wrap(errors.WithStack(e), "topicview counter")
|
||||
}
|
||||
l.RLock()
|
||||
}
|
||||
l.RUnlock()
|
||||
return nil //co.handleInsertListBuf(i, 0)
|
||||
}
|
||||
e := cLoop(&co.oddLock, co.oddTopics)
|
||||
if e != nil {
|
||||
return e
|
||||
}
|
||||
return cLoop(&co.evenLock, co.evenTopics)
|
||||
}
|
||||
|
||||
func (co *DefaultTopicViewCounter) WeekResetInit() error {
lastWeekResetStr, e := c.Meta.Get("lastWeekReset")
if e != nil && e != sql.ErrNoRows {
return e
}

spl := strings.Split(lastWeekResetStr, "-")
if len(spl) <= 1 {
return nil
}
weekState, e := strconv.Atoi(spl[1])
if e != nil {
return e
}
co.weekState = byte(weekState)

unixLastWeekReset, e := strconv.ParseInt(spl[0], 10, 64)
if e != nil {
return e
}
resetTime := time.Unix(unixLastWeekReset, 0)
if time.Since(resetTime).Hours() >= (24 * 7) {
_, e = co.resetBoth.Exec()
}
return e
}
|
||||
|
||||
func (co *DefaultTopicViewCounter) WeekResetTick() (e error) {
now := time.Now()
_, week := now.ISOWeek()
if week != int(co.weekState) {
if week%2 == 0 { // is even?
_, e = co.resetOdd.Exec()
} else {
_, e = co.resetEven.Exec()
}
co.weekState = byte(week)
}
// TODO: Retry?
if e != nil {
return e
}
return c.Meta.Set("lastWeekReset", strconv.FormatInt(now.Unix(), 10)+"-"+strconv.Itoa(week))
}
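
The week-reset pair above rotates two view columns by ISO-week parity: views land in the column matching the current week's parity, and when the parity flips the opposite column is cleared via resetOdd/resetEven. A minimal standalone sketch of that rotation idea, with illustrative in-memory fields rather than Gosora's actual prepared statements:

package main

import (
	"fmt"
	"time"
)

// weekCounter is an in-memory stand-in for the odd/even view columns above:
// bumps go into the column matching the current ISO week's parity, and when
// the week flips, the opposite column is cleared (as resetOdd/resetEven do).
type weekCounter struct {
	weekState int
	odd, even int
}

func (c *weekCounter) bump(n int) {
	if _, week := time.Now().ISOWeek(); week%2 == 0 {
		c.even += n
	} else {
		c.odd += n
	}
}

func (c *weekCounter) tick() {
	_, week := time.Now().ISOWeek()
	if week == c.weekState {
		return
	}
	if week%2 == 0 {
		c.odd = 0 // an even week began, so the odd column is cleared
	} else {
		c.even = 0
	}
	c.weekState = week
}

func main() {
	c := &weekCounter{}
	c.bump(3)
	c.tick()
	fmt.Println(c.odd, c.even)
}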
|
||||
|
||||
// TODO: Optimise this further. E.g. Using IN() on every one view topic. Rinse and repeat for two views, three views, four views and five views.
|
||||
func (co *DefaultTopicViewCounter) insertChunk(count, topicID int) (err error) {
|
||||
func (counter *DefaultTopicViewCounter) insertChunk(count int, topicID int) error {
|
||||
if count == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
c.DebugLogf("Inserting %d views into topic %d", count, topicID)
|
||||
even, odd := 0, 0
|
||||
_, week := time.Now().ISOWeek()
|
||||
if week%2 == 0 { // is even?
|
||||
even += count
|
||||
} else {
|
||||
odd += count
|
||||
}
|
||||
|
||||
if true {
|
||||
_, err = co.update.Exec(count, even, odd, topicID)
|
||||
} else {
|
||||
_, err = co.update.Exec(count, topicID)
|
||||
}
|
||||
if err == sql.ErrNoRows {
|
||||
return nil
|
||||
} else if err != nil {
|
||||
common.DebugLogf("Inserting %d views into topic %d", count, topicID)
|
||||
_, err := counter.update.Exec(count, topicID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO: Add a way to disable this for extra speed ;)
|
||||
tc := c.Topics.GetCache()
|
||||
if tc != nil {
|
||||
t, err := tc.Get(topicID)
|
||||
if err == sql.ErrNoRows {
|
||||
return nil
|
||||
} else if err != nil {
|
||||
tcache := common.Topics.GetCache()
|
||||
if tcache != nil {
|
||||
topic, err := tcache.Get(topicID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
atomic.AddInt64(&t.ViewCount, int64(count))
|
||||
atomic.AddInt64(&topic.ViewCount, int64(count))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (co *DefaultTopicViewCounter) Bump(topicID int) {
|
||||
func (counter *DefaultTopicViewCounter) Bump(topicID int) {
|
||||
// Is the ID even?
|
||||
if topicID%2 == 0 {
|
||||
co.evenLock.RLock()
|
||||
t, ok := co.evenTopics[topicID]
|
||||
co.evenLock.RUnlock()
|
||||
counter.evenLock.RLock()
|
||||
topic, ok := counter.evenTopics[topicID]
|
||||
counter.evenLock.RUnlock()
|
||||
if ok {
|
||||
t.Lock()
|
||||
t.counter++
|
||||
t.Unlock()
|
||||
topic.Lock()
|
||||
topic.counter++
|
||||
topic.Unlock()
|
||||
} else {
|
||||
co.evenLock.Lock()
|
||||
co.evenTopics[topicID] = &RWMutexCounterBucket{counter: 1}
|
||||
co.evenLock.Unlock()
|
||||
counter.evenLock.Lock()
|
||||
counter.evenTopics[topicID] = &RWMutexCounterBucket{counter: 1}
|
||||
counter.evenLock.Unlock()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
co.oddLock.RLock()
|
||||
t, ok := co.oddTopics[topicID]
|
||||
co.oddLock.RUnlock()
|
||||
counter.oddLock.RLock()
|
||||
topic, ok := counter.oddTopics[topicID]
|
||||
counter.oddLock.RUnlock()
|
||||
if ok {
|
||||
t.Lock()
|
||||
t.counter++
|
||||
t.Unlock()
|
||||
topic.Lock()
|
||||
topic.counter++
|
||||
topic.Unlock()
|
||||
} else {
|
||||
co.oddLock.Lock()
|
||||
co.oddTopics[topicID] = &RWMutexCounterBucket{counter: 1}
|
||||
co.oddLock.Unlock()
|
||||
counter.oddLock.Lock()
|
||||
counter.oddTopics[topicID] = &RWMutexCounterBucket{counter: 1}
|
||||
counter.oddLock.Unlock()
|
||||
}
|
||||
}
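
Bump in the hunk above shards topics across separate odd/even maps, each behind its own RWMutex, so concurrent view bumps rarely contend on a single lock. A rough generic sketch of that bucketing pattern (the names here are illustrative, not Gosora's types):

package main

import (
	"fmt"
	"sync"
)

// bucket is a per-topic counter guarded by its own mutex.
type bucket struct {
	sync.Mutex
	n int
}

// shardedCounter splits IDs across two maps so bumps on differently
// sharded IDs don't contend on the same map lock.
type shardedCounter struct {
	evenLock, oddLock sync.RWMutex
	even, odd         map[int]*bucket
}

func newShardedCounter() *shardedCounter {
	return &shardedCounter{even: map[int]*bucket{}, odd: map[int]*bucket{}}
}

func (c *shardedCounter) bump(id int) {
	l, m := &c.oddLock, c.odd
	if id%2 == 0 {
		l, m = &c.evenLock, c.even
	}
	l.RLock()
	b, ok := m[id]
	l.RUnlock()
	if !ok {
		l.Lock()
		// Re-check under the write lock in case another goroutine created it first.
		if b, ok = m[id]; !ok {
			b = &bucket{}
			m[id] = b
		}
		l.Unlock()
	}
	b.Lock()
	b.n++
	b.Unlock()
}

func main() {
	c := newShardedCounter()
	c.bump(2)
	c.bump(3)
	fmt.Println(len(c.even), len(c.odd))
}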
@ -1,20 +0,0 @@
package common

import (
"path/filepath"
"os"
)

func DirSize(path string) (int, error) {
var size int64
err := filepath.Walk(path, func(_ string, file os.FileInfo, err error) error {
if err != nil {
return err
}
if !file.IsDir() {
size += file.Size()
}
return err
})
return int(size), err
}
162 common/email.go
@ -1,125 +1,75 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"fmt"
|
||||
"net/mail"
|
||||
"net/smtp"
|
||||
"strings"
|
||||
|
||||
p "git.tuxpa.in/a/gosora/common/phrases"
|
||||
)
|
||||
|
||||
func SendActivationEmail(username, email, token string) error {
|
||||
schema := "http"
|
||||
if Config.SslSchema {
|
||||
schema += "s"
|
||||
}
|
||||
// TODO: Move these to the phrase system
|
||||
subject := "Account Activation - " + Site.Name
|
||||
msg := "Dear " + username + ", to complete your registration on our forums, we need you to validate your email, so that we can confirm that this email actually belongs to you.\n\nClick on the following link to do so. " + schema + "://" + Site.URL + "/user/edit/token/" + token + "\n\nIf you haven't created an account here, then please feel free to ignore this email.\nWe're sorry for the inconvenience this may have caused."
|
||||
return SendEmail(email, subject, msg)
|
||||
type Email struct {
|
||||
UserID int
|
||||
Email string
|
||||
Validated bool
|
||||
Primary bool
|
||||
Token string
|
||||
}
|
||||
|
||||
func SendValidationEmail(username, email, token string) error {
|
||||
schema := "http"
|
||||
if Config.SslSchema {
|
||||
schema += "s"
|
||||
}
|
||||
r := func(body *string) func(name, val string) {
|
||||
return func(name, val string) {
|
||||
*body = strings.Replace(*body, "{{"+name+"}}", val, -1)
|
||||
}
|
||||
}
|
||||
subject := p.GetAccountPhrase("ValidateEmailSubject")
|
||||
r1 := r(&subject)
|
||||
r1("name", Site.Name)
|
||||
body := p.GetAccountPhrase("ValidateEmailBody")
|
||||
r2 := r(&body)
|
||||
r2("username", username)
|
||||
r2("schema", schema)
|
||||
r2("url", Site.URL)
|
||||
r2("token", token)
|
||||
return SendEmail(email, subject, body)
|
||||
func SendValidationEmail(username string, email string, token string) bool {
|
||||
var schema = "http"
|
||||
if Site.EnableSsl {
|
||||
schema += "s"
|
||||
}
|
||||
|
||||
// TODO: Move these to the phrase system
|
||||
subject := "Validate Your Email @ " + Site.Name
|
||||
msg := "Dear " + username + ", following your registration on our forums, we ask you to validate your email, so that we can confirm that this email actually belongs to you.\n\nClick on the following link to do so. " + schema + "://" + Site.URL + "/user/edit/token/" + token + "\n\nIf you haven't created an account here, then please feel free to ignore this email.\nWe're sorry for the inconvenience this may have caused."
|
||||
return SendEmail(email, subject, msg)
|
||||
}
|
||||
|
||||
// TODO: Refactor this
|
||||
func SendEmail(email, subject, msg string) (err error) {
|
||||
// This hook is useful for plugin_sendmail or for testing tools. Possibly to hook it into some sort of mail server?
|
||||
ret, hasHook := GetHookTable().VhookNeedHook("email_send_intercept", email, subject, msg)
|
||||
if hasHook {
|
||||
return ret.(error)
|
||||
}
|
||||
// TODO: Add support for TLS
|
||||
func SendEmail(email string, subject string, msg string) bool {
|
||||
// This hook is useful for plugin_sendmail or for testing tools. Possibly to hook it into some sort of mail server?
|
||||
// TODO: Abstract this
|
||||
if Vhooks["email_send_intercept"] != nil {
|
||||
return Vhooks["email_send_intercept"](email, subject, msg).(bool)
|
||||
}
|
||||
body := "Subject: " + subject + "\n\n" + msg + "\n"
|
||||
|
||||
from := mail.Address{"", Site.Email}
|
||||
to := mail.Address{"", email}
|
||||
headers := make(map[string]string)
|
||||
headers["From"] = from.String()
|
||||
headers["To"] = to.String()
|
||||
headers["Subject"] = subject
|
||||
con, err := smtp.Dial(Config.SMTPServer + ":" + Config.SMTPPort)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
body := ""
|
||||
for k, v := range headers {
|
||||
body += fmt.Sprintf("%s: %s\r\n", k, v)
|
||||
}
|
||||
body += "\r\n" + msg
|
||||
if Config.SMTPUsername != "" {
|
||||
auth := smtp.PlainAuth("", Config.SMTPUsername, Config.SMTPPassword, Config.SMTPServer)
|
||||
err = con.Auth(auth)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
var c *smtp.Client
|
||||
var conn *tls.Conn
|
||||
if Config.SMTPEnableTLS {
|
||||
tlsconfig := &tls.Config{
|
||||
InsecureSkipVerify: true,
|
||||
ServerName: Config.SMTPServer,
|
||||
}
|
||||
conn, err = tls.Dial("tcp", Config.SMTPServer+":"+Config.SMTPPort, tlsconfig)
|
||||
if err != nil {
|
||||
LogWarning(err)
|
||||
return err
|
||||
}
|
||||
c, err = smtp.NewClient(conn, Config.SMTPServer)
|
||||
} else {
|
||||
c, err = smtp.Dial(Config.SMTPServer + ":" + Config.SMTPPort)
|
||||
}
|
||||
if err != nil {
|
||||
LogWarning(err)
|
||||
return err
|
||||
}
|
||||
err = con.Mail(Site.Email)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
err = con.Rcpt(email)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if Config.SMTPUsername != "" {
|
||||
auth := smtp.PlainAuth("", Config.SMTPUsername, Config.SMTPPassword, Config.SMTPServer)
|
||||
err = c.Auth(auth)
|
||||
if err != nil {
|
||||
LogWarning(err)
|
||||
return err
|
||||
}
|
||||
}
|
||||
if err = c.Mail(from.Address); err != nil {
|
||||
LogWarning(err)
|
||||
return err
|
||||
}
|
||||
if err = c.Rcpt(to.Address); err != nil {
|
||||
LogWarning(err)
|
||||
return err
|
||||
}
|
||||
emailData, err := con.Data()
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
_, err = fmt.Fprintf(emailData, body)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
w, err := c.Data()
|
||||
if err != nil {
|
||||
LogWarning(err)
|
||||
return err
|
||||
}
|
||||
_, err = w.Write([]byte(body))
|
||||
if err != nil {
|
||||
LogWarning(err)
|
||||
return err
|
||||
}
|
||||
if err = w.Close(); err != nil {
|
||||
LogWarning(err)
|
||||
return err
|
||||
}
|
||||
if err = c.Quit(); err != nil {
|
||||
LogWarning(err)
|
||||
return err
|
||||
}
|
||||
err = emailData.Close()
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
return nil
|
||||
return con.Quit() == nil
|
||||
}
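
Both sides of the SendEmail diff above drive net/smtp by hand: dial (optionally over TLS), authenticate when credentials are configured, then stream the headers and body through Data. A pared-down sketch of that flow; the server, port, and addresses below are placeholders, not values from the codebase:

package main

import (
	"crypto/tls"
	"fmt"
	"net/smtp"
)

// sendMail mirrors the flow in common/email.go: connect, optionally over TLS,
// authenticate if a username is set, then write the headers and body.
func sendMail(server, port, user, pass, from, to, subject, msg string, useTLS bool) error {
	addr := server + ":" + port
	var c *smtp.Client
	if useTLS {
		conn, err := tls.Dial("tcp", addr, &tls.Config{ServerName: server})
		if err != nil {
			return err
		}
		c, err = smtp.NewClient(conn, server)
		if err != nil {
			return err
		}
	} else {
		var err error
		c, err = smtp.Dial(addr)
		if err != nil {
			return err
		}
	}
	defer c.Close()

	if user != "" {
		if err := c.Auth(smtp.PlainAuth("", user, pass, server)); err != nil {
			return err
		}
	}
	if err := c.Mail(from); err != nil {
		return err
	}
	if err := c.Rcpt(to); err != nil {
		return err
	}
	w, err := c.Data()
	if err != nil {
		return err
	}
	if _, err = fmt.Fprintf(w, "From: %s\r\nTo: %s\r\nSubject: %s\r\n\r\n%s", from, to, subject, msg); err != nil {
		return err
	}
	if err = w.Close(); err != nil {
		return err
	}
	return c.Quit()
}

func main() {
	// Placeholder values; swap in a real SMTP server before running.
	err := sendMail("smtp.example.com", "25", "", "", "noreply@example.com", "someone@example.com", "Hello", "Test message", false)
	fmt.Println(err)
}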
|
||||
|
@ -1,90 +1,52 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
)
|
||||
import "database/sql"
|
||||
import "../query_gen/lib"
|
||||
|
||||
var Emails EmailStore
|
||||
|
||||
type Email struct {
|
||||
UserID int
|
||||
Email string
|
||||
Validated bool
|
||||
Primary bool
|
||||
Token string
|
||||
}
|
||||
|
||||
type EmailStore interface {
|
||||
// TODO: Add an autoincrement key
|
||||
Get(u *User, email string) (Email, error)
|
||||
GetEmailsByUser(u *User) (emails []Email, err error)
|
||||
Add(uid int, email, token string) error
|
||||
Delete(uid int, email string) error
|
||||
VerifyEmail(email string) error
|
||||
GetEmailsByUser(user *User) (emails []Email, err error)
|
||||
VerifyEmail(email string) error
|
||||
}
|
||||
|
||||
type DefaultEmailStore struct {
|
||||
get *sql.Stmt
|
||||
getEmailsByUser *sql.Stmt
|
||||
add *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
verifyEmail *sql.Stmt
|
||||
getEmailsByUser *sql.Stmt
|
||||
verifyEmail *sql.Stmt
|
||||
}
|
||||
|
||||
func NewDefaultEmailStore(acc *qgen.Accumulator) (*DefaultEmailStore, error) {
|
||||
e := "emails"
|
||||
return &DefaultEmailStore{
|
||||
get: acc.Select(e).Columns("email,validated,token").Where("uid=? AND email=?").Prepare(),
|
||||
getEmailsByUser: acc.Select(e).Columns("email,validated,token").Where("uid=?").Prepare(),
|
||||
add: acc.Insert(e).Columns("uid,email,validated,token").Fields("?,?,?,?").Prepare(),
|
||||
delete: acc.Delete(e).Where("uid=? AND email=?").Prepare(),
|
||||
return &DefaultEmailStore{
|
||||
getEmailsByUser: acc.Select("emails").Columns("email, validated, token").Where("uid = ?").Prepare(),
|
||||
|
||||
// Need to fix this: Empty string isn't working, it gets set to 1 instead x.x -- Has this been fixed?
|
||||
verifyEmail: acc.Update(e).Set("validated=1,token=''").Where("email=?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
// Need to fix this: Empty string isn't working, it gets set to 1 instead x.x -- Has this been fixed?
|
||||
verifyEmail: acc.Update("emails").Set("validated = 1, token = ''").Where("email = ?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
func (s *DefaultEmailStore) Get(user *User, email string) (Email, error) {
|
||||
e := Email{UserID: user.ID, Primary: email != "" && user.Email == email}
|
||||
err := s.get.QueryRow(user.ID, email).Scan(&e.Email, &e.Validated, &e.Token)
|
||||
return e, err
|
||||
func (store *DefaultEmailStore) GetEmailsByUser(user *User) (emails []Email, err error) {
|
||||
email := Email{UserID: user.ID}
|
||||
rows, err := store.getEmailsByUser.Query(user.ID)
|
||||
if err != nil {
|
||||
return emails, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
for rows.Next() {
|
||||
err := rows.Scan(&email.Email, &email.Validated, &email.Token)
|
||||
if err != nil {
|
||||
return emails, err
|
||||
}
|
||||
|
||||
if email.Email == user.Email {
|
||||
email.Primary = true
|
||||
}
|
||||
emails = append(emails, email)
|
||||
}
|
||||
return emails, rows.Err()
|
||||
}
|
||||
|
||||
func (s *DefaultEmailStore) GetEmailsByUser(user *User) (emails []Email, err error) {
|
||||
e := Email{UserID: user.ID}
|
||||
rows, err := s.getEmailsByUser.Query(user.ID)
|
||||
if err != nil {
|
||||
return emails, err
|
||||
}
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
err := rows.Scan(&e.Email, &e.Validated, &e.Token)
|
||||
if err != nil {
|
||||
return emails, err
|
||||
}
|
||||
if e.Email == user.Email {
|
||||
e.Primary = true
|
||||
}
|
||||
emails = append(emails, e)
|
||||
}
|
||||
return emails, rows.Err()
|
||||
}
|
||||
|
||||
func (s *DefaultEmailStore) Add(uid int, email, token string) error {
|
||||
email = CanonEmail(SanitiseSingleLine(email))
|
||||
_, err := s.add.Exec(uid, email, 0, token)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *DefaultEmailStore) Delete(uid int, email string) error {
|
||||
_, err := s.delete.Exec(uid, email)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *DefaultEmailStore) VerifyEmail(email string) error {
|
||||
email = CanonEmail(SanitiseSingleLine(email))
|
||||
_, err := s.verifyEmail.Exec(email)
|
||||
return err
|
||||
func (store *DefaultEmailStore) VerifyEmail(email string) error {
|
||||
_, err := store.verifyEmail.Exec(email)
|
||||
return err
|
||||
}
|
||||
|
445 common/errors.go
@ -1,28 +1,23 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"runtime/debug"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
|
||||
p "git.tuxpa.in/a/gosora/common/phrases"
|
||||
)
|
||||
|
||||
type ErrorItem struct {
|
||||
error
|
||||
Stack []byte
|
||||
error
|
||||
Stack []byte
|
||||
}
|
||||
|
||||
// ! The errorBuffer uses o(n) memory, we should probably do something about that
|
||||
// TODO: Use the errorBuffer variable to construct the system log in the Control Panel. Should we log errors caused by users too? Or just collect statistics on those or do nothing? Intercept recover()? Could we intercept the logger instead here? We might get too much information, if we intercept the logger, maybe make it part of the Debug page?
|
||||
// ? - Should we pass Header / HeaderLite rather than forcing the errors to pull the global Header instance?
|
||||
var errorBufferMutex sync.RWMutex
|
||||
//var errorBuffer []ErrorItem
|
||||
var ErrorCountSinceStartup int64
|
||||
var errorBuffer []ErrorItem
|
||||
|
||||
//var notfoundCountPerSecond int
|
||||
//var nopermsCountPerSecond int
|
||||
@ -32,382 +27,304 @@ var tList []interface{}
|
||||
|
||||
// WIP, a new system to propagate errors up from routes
|
||||
type RouteError interface {
|
||||
Type() string
|
||||
Error() string
|
||||
Cause() string
|
||||
JSON() bool
|
||||
Handled() bool
|
||||
|
||||
Wrap(string)
|
||||
Type() string
|
||||
Error() string
|
||||
JSON() bool
|
||||
Handled() bool
|
||||
}
|
||||
|
||||
type RouteErrorImpl struct {
|
||||
userText string
|
||||
sysText string
|
||||
system bool
|
||||
json bool
|
||||
handled bool
|
||||
text string
|
||||
system bool
|
||||
json bool
|
||||
handled bool
|
||||
}
|
||||
|
||||
func (err *RouteErrorImpl) Type() string {
|
||||
// System errors may contain sensitive information we don't want the user to see
|
||||
if err.system {
|
||||
return "system"
|
||||
}
|
||||
return "user"
|
||||
// System errors may contain sensitive information we don't want the user to see
|
||||
if err.system {
|
||||
return "system"
|
||||
}
|
||||
return "user"
|
||||
}
|
||||
|
||||
func (err *RouteErrorImpl) Error() string {
|
||||
return err.userText
|
||||
}
|
||||
|
||||
func (err *RouteErrorImpl) Cause() string {
|
||||
if err.sysText == "" {
|
||||
return err.Error()
|
||||
}
|
||||
return err.sysText
|
||||
return err.text
|
||||
}
|
||||
|
||||
// Respond with JSON?
|
||||
func (err *RouteErrorImpl) JSON() bool {
|
||||
return err.json
|
||||
return err.json
|
||||
}
|
||||
|
||||
// Has this error been dealt with elsewhere?
|
||||
func (err *RouteErrorImpl) Handled() bool {
|
||||
return err.handled
|
||||
}
|
||||
|
||||
// Move the current error into the system error slot and add a new one to the user error slot to show the user
|
||||
func (err *RouteErrorImpl) Wrap(userErr string) {
|
||||
err.sysText = err.userText
|
||||
err.userText = userErr
|
||||
return err.handled
|
||||
}
|
||||
|
||||
func HandledRouteError() RouteError {
|
||||
return &RouteErrorImpl{"", "", false, false, true}
|
||||
}
|
||||
|
||||
func Error(errmsg string) RouteError {
|
||||
return &RouteErrorImpl{errmsg, "", false, false, false}
|
||||
}
|
||||
|
||||
func FromError(err error) RouteError {
|
||||
return &RouteErrorImpl{err.Error(), "", false, false, false}
|
||||
}
|
||||
|
||||
func ErrorJSQ(errmsg string, js bool) RouteError {
|
||||
return &RouteErrorImpl{errmsg, "", false, js, false}
|
||||
}
|
||||
|
||||
func SysError(errmsg string) RouteError {
|
||||
return &RouteErrorImpl{errmsg, errmsg, true, false, false}
|
||||
return &RouteErrorImpl{"", false, false, true}
|
||||
}
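
The RouteError values above let a route report both what went wrong and whether a response has already been written for it, so the outer router doesn't double-render an error page. A stripped-down illustration of that pattern (generic names, not Gosora's actual signatures):

package main

import (
	"log"
	"net/http"
)

// routeError is a cut-down RouteError: an error that records whether a
// response has already been written for it.
type routeError struct {
	msg     string
	handled bool
}

func (e *routeError) Error() string { return e.msg }

// brokenRoute writes its own error page, so it marks the error as handled.
func brokenRoute(w http.ResponseWriter, r *http.Request) *routeError {
	http.Error(w, "internal error", http.StatusInternalServerError)
	return &routeError{msg: "something broke", handled: true}
}

// wrap only renders an error response if the route didn't already do so.
func wrap(h func(http.ResponseWriter, *http.Request) *routeError) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if err := h(w, r); err != nil && !err.handled {
			http.Error(w, err.Error(), http.StatusInternalServerError)
		}
	}
}

func main() {
	http.Handle("/", wrap(brokenRoute))
	log.Fatal(http.ListenAndServe(":8080", nil))
}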
|
||||
|
||||
// LogError logs internal handler errors which can't be handled with InternalError() as a wrapper for log.Fatal(), we might do more with it in the future.
|
||||
// TODO: Clean-up extra as a way of passing additional context
|
||||
func LogError(err error, extra ...string) {
|
||||
LogWarning(err, extra...)
|
||||
ErrLogger.Fatal("")
|
||||
LogWarning(err, extra...)
|
||||
log.Fatal("")
|
||||
}
|
||||
|
||||
func LogWarning(err error, extra ...string) {
|
||||
var esb strings.Builder
|
||||
for _, extraBit := range extra {
|
||||
esb.WriteString(extraBit)
|
||||
esb.WriteRune(10)
|
||||
}
|
||||
if err == nil {
|
||||
esb.WriteString("nil error found")
|
||||
} else {
|
||||
esb.WriteString(err.Error())
|
||||
}
|
||||
esb.WriteRune(10)
|
||||
errmsg := esb.String()
|
||||
|
||||
errorBufferMutex.Lock()
|
||||
defer errorBufferMutex.Unlock()
|
||||
stack := debug.Stack() // debug.Stack() can't be executed concurrently, so we'll guard this with a mutex too
|
||||
Err(errmsg, string(stack))
|
||||
//errorBuffer = append(errorBuffer, ErrorItem{err, stack})
|
||||
atomic.AddInt64(&ErrorCountSinceStartup,1)
|
||||
var errmsg string
|
||||
for _, extraBit := range extra {
|
||||
errmsg += extraBit + "\n"
|
||||
}
|
||||
if err == nil {
|
||||
errmsg += "Unknown error"
|
||||
} else {
|
||||
errmsg += err.Error()
|
||||
}
|
||||
stack := debug.Stack()
|
||||
log.Print(errmsg+"\n", string(stack))
|
||||
errorBufferMutex.Lock()
|
||||
defer errorBufferMutex.Unlock()
|
||||
errorBuffer = append(errorBuffer, ErrorItem{err, stack})
|
||||
}
|
||||
|
||||
func errorHeader(w http.ResponseWriter, u *User, title string) *Header {
|
||||
h := DefaultHeader(w, u)
|
||||
h.Title = title
|
||||
h.Zone = "error"
|
||||
return h
|
||||
func errorHeader(w http.ResponseWriter, user User, title string) *Header {
|
||||
header := DefaultHeader(w, user)
|
||||
header.Title = title
|
||||
return header
|
||||
}
|
||||
|
||||
// TODO: Dump the request?
|
||||
// InternalError is the main function for handling internal errors, while simultaneously printing out a page for the end-user to let them know that *something* has gone wrong
|
||||
// ? - Add a user parameter?
|
||||
// ! Do not call CustomError here or we might get an error loop
|
||||
func InternalError(err error, w http.ResponseWriter, r *http.Request) RouteError {
|
||||
pi := ErrorPage{errorHeader(w, &GuestUser, p.GetErrorPhrase("internal_error_title")), p.GetErrorPhrase("internal_error_body")}
|
||||
handleErrorTemplate(w, r, pi, 500)
|
||||
LogError(err)
|
||||
return HandledRouteError()
|
||||
pi := ErrorPage{errorHeader(w, GuestUser, "Internal Server Error"), "A problem has occurred in the system."}
|
||||
handleErrorTemplate(w, r, pi)
|
||||
LogError(err)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
// InternalErrorJSQ is the JSON "maybe" version of InternalError which can handle both JSON and normal requests
|
||||
// ? - Add a user parameter?
|
||||
func InternalErrorJSQ(err error, w http.ResponseWriter, r *http.Request, js bool) RouteError {
|
||||
if !js {
|
||||
return InternalError(err, w, r)
|
||||
}
|
||||
return InternalErrorJS(err, w, r)
|
||||
func InternalErrorJSQ(err error, w http.ResponseWriter, r *http.Request, isJs bool) RouteError {
|
||||
if !isJs {
|
||||
return InternalError(err, w, r)
|
||||
}
|
||||
return InternalErrorJS(err, w, r)
|
||||
}
|
||||
|
||||
// InternalErrorJS is the JSON version of InternalError on routes we know will only be requested via JSON. E.g. An API.
|
||||
// ? - Add a user parameter?
|
||||
func InternalErrorJS(err error, w http.ResponseWriter, r *http.Request) RouteError {
|
||||
w.WriteHeader(500)
|
||||
writeJsonError(p.GetErrorPhrase("internal_error_body"), w)
|
||||
LogError(err)
|
||||
return HandledRouteError()
|
||||
w.WriteHeader(500)
|
||||
writeJsonError("A problem has occurred in the system.", w)
|
||||
LogError(err)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
// When the task system detects if the database is down, some database errors might slip by this
|
||||
// When the task system detects if the database is down, some database errors might lip by this
|
||||
func DatabaseError(w http.ResponseWriter, r *http.Request) RouteError {
|
||||
pi := ErrorPage{errorHeader(w, &GuestUser, p.GetErrorPhrase("internal_error_title")), p.GetErrorPhrase("internal_error_body")}
|
||||
handleErrorTemplate(w, r, pi, 500)
|
||||
return HandledRouteError()
|
||||
pi := ErrorPage{errorHeader(w, GuestUser, "Internal Server Error"), "A problem has occurred in the system."}
|
||||
handleErrorTemplate(w, r, pi)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
var xmlInternalError = []byte(`<?xml version="1.0" encoding="UTF-8"?>
|
||||
<error>A problem has occured</error>`)
|
||||
|
||||
func InternalErrorXML(err error, w http.ResponseWriter, r *http.Request) RouteError {
|
||||
w.Header().Set("Content-Type", "application/xml")
|
||||
w.WriteHeader(500)
|
||||
w.Write([]byte(`<?xml version="1.0"encoding="UTF-8"?>
|
||||
<error>` + p.GetErrorPhrase("internal_error_body") + `</error>`))
|
||||
LogError(err)
|
||||
return HandledRouteError()
|
||||
w.Header().Set("Content-Type", "application/xml")
|
||||
w.WriteHeader(500)
|
||||
w.Write(xmlInternalError)
|
||||
LogError(err)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
// TODO: Stop killing the instance upon hitting an error with InternalError* and deprecate this
|
||||
func SilentInternalErrorXML(err error, w http.ResponseWriter, r *http.Request) RouteError {
|
||||
w.Header().Set("Content-Type", "application/xml")
|
||||
w.WriteHeader(500)
|
||||
w.Write([]byte(`<?xml version="1.0"encoding="UTF-8"?>
|
||||
<error>` + p.GetErrorPhrase("internal_error_body") + `</error>`))
|
||||
log.Print("InternalError: ", err)
|
||||
return HandledRouteError()
|
||||
w.Header().Set("Content-Type", "application/xml")
|
||||
w.WriteHeader(500)
|
||||
w.Write(xmlInternalError)
|
||||
log.Print("InternalError: ", err)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
// ! Do not call CustomError here otherwise we might get an error loop
|
||||
func PreError(errmsg string, w http.ResponseWriter, r *http.Request) RouteError {
|
||||
pi := ErrorPage{errorHeader(w, &GuestUser, p.GetErrorPhrase("error_title")), errmsg}
|
||||
handleErrorTemplate(w, r, pi, 500)
|
||||
return HandledRouteError()
|
||||
w.WriteHeader(500)
|
||||
pi := ErrorPage{errorHeader(w, GuestUser, "Error"), errmsg}
|
||||
handleErrorTemplate(w, r, pi)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
func PreErrorJS(errmsg string, w http.ResponseWriter, r *http.Request) RouteError {
|
||||
w.WriteHeader(500)
|
||||
writeJsonError(errmsg, w)
|
||||
return HandledRouteError()
|
||||
w.WriteHeader(500)
|
||||
writeJsonError(errmsg, w)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
func PreErrorJSQ(errmsg string, w http.ResponseWriter, r *http.Request, js bool) RouteError {
|
||||
if !js {
|
||||
return PreError(errmsg, w, r)
|
||||
}
|
||||
return PreErrorJS(errmsg, w, r)
|
||||
func PreErrorJSQ(errmsg string, w http.ResponseWriter, r *http.Request, isJs bool) RouteError {
|
||||
if !isJs {
|
||||
return PreError(errmsg, w, r)
|
||||
}
|
||||
return PreErrorJS(errmsg, w, r)
|
||||
}
|
||||
|
||||
// LocalError is an error shown to the end-user when something goes wrong and it's not the software's fault
|
||||
// TODO: Pass header in for this and similar errors instead of having to pass in both user and w? Would also allow for more stateful things, although this could be a problem
|
||||
/*func LocalError(errmsg string, w http.ResponseWriter, r *http.Request, user *User) RouteError {
|
||||
w.WriteHeader(500)
|
||||
pi := ErrorPage{errorHeader(w, user, p.GetErrorPhrase("local_error_title")), errmsg}
|
||||
handleErrorTemplate(w, r, pi)
|
||||
return HandledRouteError()
|
||||
}*/
|
||||
|
||||
func LocalError(errmsg string, w http.ResponseWriter, r *http.Request, u *User) RouteError {
|
||||
return SimpleError(errmsg, w, r, errorHeader(w, u, ""))
|
||||
func LocalError(errmsg string, w http.ResponseWriter, r *http.Request, user User) RouteError {
|
||||
w.WriteHeader(500)
|
||||
pi := ErrorPage{errorHeader(w, user, "Local Error"), errmsg}
|
||||
handleErrorTemplate(w, r, pi)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
func LocalErrorf(errmsg string, w http.ResponseWriter, r *http.Request, u *User, params ...interface{}) RouteError {
|
||||
return LocalError(fmt.Sprintf(errmsg, params), w, r, u)
|
||||
}
|
||||
|
||||
func SimpleError(errmsg string, w http.ResponseWriter, r *http.Request, h *Header) RouteError {
|
||||
if h == nil {
|
||||
h = errorHeader(w, &GuestUser, p.GetErrorPhrase("local_error_title"))
|
||||
} else {
|
||||
h.Title = p.GetErrorPhrase("local_error_title")
|
||||
}
|
||||
pi := ErrorPage{h, errmsg}
|
||||
handleErrorTemplate(w, r, pi, 500)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
func LocalErrorJSQ(errmsg string, w http.ResponseWriter, r *http.Request, u *User, js bool) RouteError {
|
||||
if !js {
|
||||
return SimpleError(errmsg, w, r, errorHeader(w, u, ""))
|
||||
}
|
||||
return LocalErrorJS(errmsg, w, r)
|
||||
func LocalErrorJSQ(errmsg string, w http.ResponseWriter, r *http.Request, user User, isJs bool) RouteError {
|
||||
if !isJs {
|
||||
return LocalError(errmsg, w, r, user)
|
||||
}
|
||||
return LocalErrorJS(errmsg, w, r)
|
||||
}
|
||||
|
||||
func LocalErrorJS(errmsg string, w http.ResponseWriter, r *http.Request) RouteError {
|
||||
w.WriteHeader(500)
|
||||
writeJsonError(errmsg, w)
|
||||
return HandledRouteError()
|
||||
w.WriteHeader(500)
|
||||
writeJsonError(errmsg, w)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
// TODO: We might want to centralise the error logic in the future and just return what the error handler needs to construct the response rather than handling it here
|
||||
// NoPermissions is an error shown to the end-user when they try to access an area which they aren't authorised to access
|
||||
func NoPermissions(w http.ResponseWriter, r *http.Request, u *User) RouteError {
|
||||
pi := ErrorPage{errorHeader(w, u, p.GetErrorPhrase("no_permissions_title")), p.GetErrorPhrase("no_permissions_body")}
|
||||
handleErrorTemplate(w, r, pi, 403)
|
||||
return HandledRouteError()
|
||||
func NoPermissions(w http.ResponseWriter, r *http.Request, user User) RouteError {
|
||||
w.WriteHeader(403)
|
||||
pi := ErrorPage{errorHeader(w, user, "Local Error"), "You don't have permission to do that."}
|
||||
handleErrorTemplate(w, r, pi)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
func NoPermissionsJSQ(w http.ResponseWriter, r *http.Request, u *User, js bool) RouteError {
|
||||
if !js {
|
||||
return NoPermissions(w, r, u)
|
||||
}
|
||||
return NoPermissionsJS(w, r, u)
|
||||
func NoPermissionsJSQ(w http.ResponseWriter, r *http.Request, user User, isJs bool) RouteError {
|
||||
if !isJs {
|
||||
return NoPermissions(w, r, user)
|
||||
}
|
||||
return NoPermissionsJS(w, r, user)
|
||||
}
|
||||
|
||||
func NoPermissionsJS(w http.ResponseWriter, r *http.Request, u *User) RouteError {
|
||||
w.WriteHeader(403)
|
||||
writeJsonError(p.GetErrorPhrase("no_permissions_body"), w)
|
||||
return HandledRouteError()
|
||||
func NoPermissionsJS(w http.ResponseWriter, r *http.Request, user User) RouteError {
|
||||
w.WriteHeader(403)
|
||||
writeJsonError("You don't have permission to do that.", w)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
// ? - Is this actually used? Should it be used? A ban in Gosora should be more of a permission revocation to stop them posting rather than something which spits up an error page, right?
|
||||
func Banned(w http.ResponseWriter, r *http.Request, u *User) RouteError {
|
||||
pi := ErrorPage{errorHeader(w, u, p.GetErrorPhrase("banned_title")), p.GetErrorPhrase("banned_body")}
|
||||
handleErrorTemplate(w, r, pi, 403)
|
||||
return HandledRouteError()
|
||||
func Banned(w http.ResponseWriter, r *http.Request, user User) RouteError {
|
||||
w.WriteHeader(403)
|
||||
pi := ErrorPage{errorHeader(w, user, "Banned"), "You have been banned from this site."}
|
||||
handleErrorTemplate(w, r, pi)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
// nolint
|
||||
// BannedJSQ is the version of the banned error page which handles both JavaScript requests and normal page loads
|
||||
func BannedJSQ(w http.ResponseWriter, r *http.Request, user *User, js bool) RouteError {
|
||||
if !js {
|
||||
return Banned(w, r, user)
|
||||
}
|
||||
return BannedJS(w, r, user)
|
||||
func BannedJSQ(w http.ResponseWriter, r *http.Request, user User, isJs bool) RouteError {
|
||||
if !isJs {
|
||||
return Banned(w, r, user)
|
||||
}
|
||||
return BannedJS(w, r, user)
|
||||
}
|
||||
|
||||
func BannedJS(w http.ResponseWriter, r *http.Request, u *User) RouteError {
|
||||
w.WriteHeader(403)
|
||||
writeJsonError(p.GetErrorPhrase("banned_body"), w)
|
||||
return HandledRouteError()
|
||||
func BannedJS(w http.ResponseWriter, r *http.Request, user User) RouteError {
|
||||
w.WriteHeader(403)
|
||||
writeJsonError("You have been banned from this site.", w)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
// nolint
|
||||
func LoginRequiredJSQ(w http.ResponseWriter, r *http.Request, u *User, js bool) RouteError {
|
||||
if !js {
|
||||
return LoginRequired(w, r, u)
|
||||
}
|
||||
return LoginRequiredJS(w, r, u)
|
||||
func LoginRequiredJSQ(w http.ResponseWriter, r *http.Request, user User, isJs bool) RouteError {
|
||||
if !isJs {
|
||||
return LoginRequired(w, r, user)
|
||||
}
|
||||
return LoginRequiredJS(w, r, user)
|
||||
}
|
||||
|
||||
// ? - Where is this used? Should we use it more?
|
||||
// LoginRequired is an error shown to the end-user when they try to access an area which requires them to login
|
||||
func LoginRequired(w http.ResponseWriter, r *http.Request, u *User) RouteError {
|
||||
return CustomError(p.GetErrorPhrase("login_required_body"), 401, p.GetErrorPhrase("no_permissions_title"), w, r, nil, u)
|
||||
func LoginRequired(w http.ResponseWriter, r *http.Request, user User) RouteError {
|
||||
w.WriteHeader(401)
|
||||
pi := ErrorPage{errorHeader(w, user, "Local Error"), "You need to login to do that."}
|
||||
handleErrorTemplate(w, r, pi)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
// nolint
|
||||
func LoginRequiredJS(w http.ResponseWriter, r *http.Request, u *User) RouteError {
|
||||
w.WriteHeader(401)
|
||||
writeJsonError(p.GetErrorPhrase("login_required_body"), w)
|
||||
return HandledRouteError()
|
||||
func LoginRequiredJS(w http.ResponseWriter, r *http.Request, user User) RouteError {
|
||||
w.WriteHeader(401)
|
||||
writeJsonError("You need to login to do that.", w)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
// SecurityError is used whenever a session mismatch is found
|
||||
// ? - Should we add JS and JSQ versions of this?
|
||||
func SecurityError(w http.ResponseWriter, r *http.Request, u *User) RouteError {
|
||||
pi := ErrorPage{errorHeader(w, u, p.GetErrorPhrase("security_error_title")), p.GetErrorPhrase("security_error_body")}
|
||||
w.Header().Set("Content-Type", "text/html;charset=utf-8")
|
||||
w.WriteHeader(403)
|
||||
e := RenderTemplateAlias("error", "security_error", w, r, pi.Header, pi)
|
||||
if e != nil {
|
||||
LogError(e)
|
||||
}
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
var microNotFoundBytes = []byte("file not found")
|
||||
func MicroNotFound(w http.ResponseWriter, r *http.Request) RouteError {
|
||||
w.Header().Set("Content-Type", "text/html;charset=utf-8")
|
||||
w.WriteHeader(404)
|
||||
_, _ = w.Write(microNotFoundBytes)
|
||||
return HandledRouteError()
|
||||
func SecurityError(w http.ResponseWriter, r *http.Request, user User) RouteError {
|
||||
w.WriteHeader(403)
|
||||
pi := ErrorPage{errorHeader(w, user, "Security Error"), "There was a security issue with your request."}
|
||||
if RunPreRenderHook("pre_render_security_error", w, r, &user, &pi) {
|
||||
return nil
|
||||
}
|
||||
err := Templates.ExecuteTemplate(w, "error.html", pi)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
}
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
// NotFound is used when the requested page doesn't exist
|
||||
// ? - Add a JSQ version of this?
|
||||
// ? - Add a JSQ and JS version of this?
|
||||
// ? - Add a user parameter?
|
||||
func NotFound(w http.ResponseWriter, r *http.Request, h *Header) RouteError {
|
||||
return CustomError(p.GetErrorPhrase("not_found_body"), 404, p.GetErrorPhrase("not_found_title"), w, r, h, &GuestUser)
|
||||
}
|
||||
|
||||
// ? - Add a user parameter?
|
||||
func NotFoundJS(w http.ResponseWriter, r *http.Request) RouteError {
|
||||
w.WriteHeader(404)
|
||||
writeJsonError(p.GetErrorPhrase("not_found_body"), w)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
func NotFoundJSQ(w http.ResponseWriter, r *http.Request, h *Header, js bool) RouteError {
|
||||
if js {
|
||||
return NotFoundJS(w, r)
|
||||
}
|
||||
if h == nil {
|
||||
h = DefaultHeader(w, &GuestUser)
|
||||
}
|
||||
return NotFound(w, r, h)
|
||||
func NotFound(w http.ResponseWriter, r *http.Request, header *Header) RouteError {
|
||||
return CustomError("The requested page doesn't exist.", 404, "Not Found", w, r, header, GuestUser)
|
||||
}
|
||||
|
||||
// CustomError lets us make custom error types which aren't covered by the generic functions above
|
||||
func CustomError(errmsg string, errcode int, errtitle string, w http.ResponseWriter, r *http.Request, h *Header, u *User) (rerr RouteError) {
|
||||
if h == nil {
|
||||
h, rerr = UserCheck(w, r, u)
|
||||
if rerr != nil {
|
||||
h = errorHeader(w, u, errtitle)
|
||||
}
|
||||
}
|
||||
h.Title = errtitle
|
||||
h.Zone = "error"
|
||||
pi := ErrorPage{h, errmsg}
|
||||
handleErrorTemplate(w, r, pi, errcode)
|
||||
return HandledRouteError()
|
||||
func CustomError(errmsg string, errcode int, errtitle string, w http.ResponseWriter, r *http.Request, header *Header, user User) RouteError {
|
||||
if header == nil {
|
||||
header = DefaultHeader(w, user)
|
||||
}
|
||||
w.WriteHeader(errcode)
|
||||
pi := ErrorPage{header, errmsg}
|
||||
handleErrorTemplate(w, r, pi)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
// CustomErrorJSQ is a version of CustomError which lets us handle both JSON and regular pages depending on how it's being accessed
|
||||
func CustomErrorJSQ(errmsg string, errcode int, errtitle string, w http.ResponseWriter, r *http.Request, h *Header, u *User, js bool) RouteError {
|
||||
if !js {
|
||||
return CustomError(errmsg, errcode, errtitle, w, r, h, u)
|
||||
}
|
||||
return CustomErrorJS(errmsg, errcode, w, r, u)
|
||||
func CustomErrorJSQ(errmsg string, errcode int, errtitle string, w http.ResponseWriter, r *http.Request, header *Header, user User, isJs bool) RouteError {
|
||||
if !isJs {
|
||||
if header == nil {
|
||||
header = DefaultHeader(w, user)
|
||||
}
|
||||
return CustomError(errmsg, errcode, errtitle, w, r, header, user)
|
||||
}
|
||||
return CustomErrorJS(errmsg, errcode, w, r, user)
|
||||
}
|
||||
|
||||
// CustomErrorJS is the pure JSON version of CustomError
|
||||
func CustomErrorJS(errmsg string, errcode int, w http.ResponseWriter, r *http.Request, u *User) RouteError {
|
||||
w.WriteHeader(errcode)
|
||||
writeJsonError(errmsg, w)
|
||||
return HandledRouteError()
|
||||
func CustomErrorJS(errmsg string, errcode int, w http.ResponseWriter, r *http.Request, user User) RouteError {
|
||||
w.WriteHeader(errcode)
|
||||
writeJsonError(errmsg, w)
|
||||
return HandledRouteError()
|
||||
}
|
||||
|
||||
// TODO: Should we optimise this by caching these json strings?
|
||||
func writeJsonError(errmsg string, w http.ResponseWriter) {
|
||||
_, _ = w.Write([]byte(`{"errmsg":"` + strings.Replace(errmsg, "\"", "", -1) + `"}`))
|
||||
_, _ = w.Write([]byte(`{"errmsg":"` + strings.Replace(errmsg, "\"", "", -1) + `"}`))
|
||||
}
|
||||
|
||||
func handleErrorTemplate(w http.ResponseWriter, r *http.Request, pi ErrorPage, errcode int) {
|
||||
w.Header().Set("Content-Type", "text/html;charset=utf-8")
|
||||
w.WriteHeader(errcode)
|
||||
err := RenderTemplateAlias("error", "error", w, r, pi.Header, pi)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
}
|
||||
func handleErrorTemplate(w http.ResponseWriter, r *http.Request, pi ErrorPage) {
|
||||
// TODO: What to do about this hook?
|
||||
if RunPreRenderHook("pre_render_error", w, r, &pi.Header.CurrentUser, &pi) {
|
||||
return
|
||||
}
|
||||
err := RunThemeTemplate(pi.Header.Theme.Name, "error", pi, w)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
}
|
||||
}
|
||||
|
||||
// Alias of routes.renderTemplate
|
||||
var RenderTemplateAlias func(tmplName, hookName string, w http.ResponseWriter, r *http.Request, h *Header, pi interface{}) error
|
||||
|
926 common/extend.go
File diff suppressed because it is too large
720 common/files.go
@ -3,508 +3,326 @@ package common
|
||||
import (
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"mime"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
tmpl "git.tuxpa.in/a/gosora/tmpl_client"
|
||||
"github.com/andybalholm/brotli"
|
||||
"../tmpl_client"
|
||||
)
|
||||
|
||||
//type SFileList map[string]*SFile
|
||||
//type SFileListShort map[string]*SFile
|
||||
type SFileList map[string]SFile
|
||||
|
||||
var StaticFiles = SFileList{"/s/", make(map[string]*SFile), make(map[string]*SFile)}
|
||||
|
||||
//var StaticFilesShort SFileList = make(map[string]*SFile)
|
||||
var StaticFiles SFileList = make(map[string]SFile)
|
||||
var staticFileMutex sync.RWMutex
|
||||
|
||||
// ? Is it efficient to have two maps for this?
|
||||
type SFileList struct {
|
||||
Prefix string
|
||||
Long map[string]*SFile
|
||||
Short map[string]*SFile
|
||||
}
|
||||
|
||||
type SFile struct {
|
||||
// TODO: Move these to the end?
|
||||
Data []byte
|
||||
GzipData []byte
|
||||
BrData []byte
|
||||
|
||||
Sha256 string
|
||||
Sha256I string
|
||||
OName string
|
||||
Pos int64
|
||||
|
||||
Length int64
|
||||
StrLength string
|
||||
GzipLength int64
|
||||
StrGzipLength string
|
||||
BrLength int64
|
||||
StrBrLength string
|
||||
|
||||
Mimetype string
|
||||
Info os.FileInfo
|
||||
FormattedModTime string
|
||||
Data []byte
|
||||
GzipData []byte
|
||||
Pos int64
|
||||
Length int64
|
||||
GzipLength int64
|
||||
Mimetype string
|
||||
Info os.FileInfo
|
||||
FormattedModTime string
|
||||
}
|
||||
|
||||
type CSSData struct {
|
||||
Phrases map[string]string
|
||||
Phrases map[string]string
|
||||
}
|
||||
|
||||
func (l SFileList) JSTmplInit() error {
|
||||
DebugLog("Initialising the client side templates")
|
||||
return filepath.Walk("./tmpl_client", func(path string, f os.FileInfo, err error) error {
|
||||
if f.IsDir() || strings.HasSuffix(path, "tmpl_list.go") || strings.HasSuffix(path, "stub.go") {
|
||||
return nil
|
||||
}
|
||||
path = strings.Replace(path, "\\", "/", -1)
|
||||
DebugLog("Processing client template " + path)
|
||||
data, err := ioutil.ReadFile(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
func (list SFileList) JSTmplInit() error {
|
||||
DebugLog("Initialising the client side templates")
|
||||
var fragMap = make(map[string][][]byte)
|
||||
fragMap["alert"] = tmpl.GetFrag("alert")
|
||||
fragMap["topics_topic"] = tmpl.GetFrag("topics_topic")
|
||||
fragMap["topic_posts"] = tmpl.GetFrag("topic_posts")
|
||||
fragMap["topic_alt_posts"] = tmpl.GetFrag("topic_alt_posts")
|
||||
DebugLog("fragMap: ", fragMap)
|
||||
return filepath.Walk("./tmpl_client", func(path string, f os.FileInfo, err error) error {
|
||||
if f.IsDir() {
|
||||
return nil
|
||||
}
|
||||
if strings.HasSuffix(path, "template_list.go") || strings.HasSuffix(path, "stub.go") {
|
||||
return nil
|
||||
}
|
||||
|
||||
path = strings.TrimPrefix(path, "tmpl_client/")
|
||||
tmplName := strings.TrimSuffix(path, ".jgo")
|
||||
shortName := strings.TrimPrefix(tmplName, "tmpl_")
|
||||
path = strings.Replace(path, "\\", "/", -1)
|
||||
DebugLog("Processing client template " + path)
|
||||
data, err := ioutil.ReadFile(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
replace := func(data []byte, replaceThis, withThis string) []byte {
|
||||
return bytes.Replace(data, []byte(replaceThis), []byte(withThis), -1)
|
||||
}
|
||||
rep := func(replaceThis, withThis string) {
|
||||
data = replace(data, replaceThis, withThis)
|
||||
}
|
||||
path = strings.TrimPrefix(path, "tmpl_client/")
|
||||
tmplName := strings.TrimSuffix(path, ".go")
|
||||
shortName := strings.TrimPrefix(tmplName, "template_")
|
||||
|
||||
startIndex, hasFunc := skipAllUntilCharsExist(data, 0, []byte("if(tmplInits===undefined)"))
|
||||
if !hasFunc {
|
||||
return errors.New("no init map found")
|
||||
}
|
||||
data = data[startIndex-len([]byte("if(tmplInits===undefined)")):]
|
||||
rep("// nolint", "")
|
||||
//rep("func ", "function ")
|
||||
rep("func ", "function ")
|
||||
rep(" error {\n", " {\nlet o=\"\"\n")
|
||||
funcIndex, hasFunc := skipAllUntilCharsExist(data, 0, []byte("function Tmpl_"))
|
||||
if !hasFunc {
|
||||
return errors.New("no template function found")
|
||||
}
|
||||
spaceIndex, hasSpace := skipUntilIfExists(data, funcIndex, ' ')
|
||||
if !hasSpace {
|
||||
return errors.New("no spaces found after the template function name")
|
||||
}
|
||||
endBrace, hasBrace := skipUntilIfExists(data, spaceIndex, ')')
|
||||
if !hasBrace {
|
||||
return errors.New("no right brace found after the template function name")
|
||||
}
|
||||
fmt.Println("spaceIndex: ", spaceIndex)
|
||||
fmt.Println("endBrace: ", endBrace)
|
||||
fmt.Println("string(data[spaceIndex:endBrace]): ", string(data[spaceIndex:endBrace]))
|
||||
var replace = func(data []byte, replaceThis string, withThis string) []byte {
|
||||
return bytes.Replace(data, []byte(replaceThis), []byte(withThis), -1)
|
||||
}
|
||||
|
||||
preLen := len(data)
|
||||
rep(string(data[spaceIndex:endBrace]), "")
|
||||
rep("))\n", " \n")
|
||||
endBrace -= preLen - len(data) // Offset it as we've deleted portions
|
||||
fmt.Println("new endBrace: ", endBrace)
|
||||
fmt.Println("data: ", string(data))
|
||||
startIndex, hasFunc := skipAllUntilCharsExist(data, 0, []byte("func init() {"))
|
||||
if !hasFunc {
|
||||
return errors.New("no init function found")
|
||||
}
|
||||
data = data[startIndex-len([]byte("func init() {")):]
|
||||
data = replace(data, "func ", "function ")
|
||||
data = replace(data, "function init() {", "tmplInits[\""+tmplName+"\"] = ")
|
||||
data = replace(data, " error {\n", " {\nlet out = \"\"\n")
|
||||
funcIndex, hasFunc := skipAllUntilCharsExist(data, 0, []byte("function Template_"))
|
||||
if !hasFunc {
|
||||
return errors.New("no template function found")
|
||||
}
|
||||
spaceIndex, hasSpace := skipUntilIfExists(data, funcIndex, ' ')
|
||||
if !hasSpace {
|
||||
return errors.New("no spaces found after the template function name")
|
||||
}
|
||||
endBrace, hasBrace := skipUntilIfExists(data, spaceIndex, ')')
|
||||
if !hasBrace {
|
||||
return errors.New("no right brace found after the template function name")
|
||||
}
|
||||
fmt.Println("spaceIndex: ", spaceIndex)
|
||||
fmt.Println("endBrace: ", endBrace)
|
||||
fmt.Println("string(data[spaceIndex:endBrace]): ", string(data[spaceIndex:endBrace]))
|
||||
|
||||
/*showPos := func(data []byte, index int) (out string) {
|
||||
out = "["
|
||||
for j, char := range data {
|
||||
if index == j {
|
||||
out += "[" + string(char) + "] "
|
||||
} else {
|
||||
out += string(char) + " "
|
||||
}
|
||||
}
|
||||
return out + "]"
|
||||
}*/
|
||||
preLen := len(data)
|
||||
data = replace(data, string(data[spaceIndex:endBrace]), "")
|
||||
data = replace(data, "))\n", "\n")
|
||||
endBrace -= preLen - len(data) // Offset it as we've deleted portions
|
||||
fmt.Println("new endBrace: ", endBrace)
|
||||
fmt.Println("data: ", string(data))
|
||||
|
||||
// ? Can we just use a regex? I'm thinking of going more efficient, or just outright rolling wasm, this is a temp hack in a place where performance doesn't particularly matter
|
||||
each := func(phrase string, h func(index int)) {
|
||||
//fmt.Println("find each '" + phrase + "'")
|
||||
index := endBrace
|
||||
if index < 0 {
|
||||
panic("index under zero: " + strconv.Itoa(index))
|
||||
}
|
||||
var foundIt bool
|
||||
for {
|
||||
//fmt.Println("in index: ", index)
|
||||
//fmt.Println("pos: ", showPos(data, index))
|
||||
index, foundIt = skipAllUntilCharsExist(data, index, []byte(phrase))
|
||||
if !foundIt {
|
||||
break
|
||||
}
|
||||
h(index)
|
||||
}
|
||||
}
|
||||
each("strconv.Itoa(", func(index int) {
|
||||
braceAt, hasEndBrace := skipUntilIfExistsOrLine(data, index, ')')
|
||||
if hasEndBrace {
|
||||
data[braceAt] = ' ' // Blank it
|
||||
}
|
||||
})
|
||||
each("[]byte(", func(index int) {
|
||||
braceAt, hasEndBrace := skipUntilIfExistsOrLine(data, index, ')')
|
||||
if hasEndBrace {
|
||||
data[braceAt] = ' ' // Blank it
|
||||
}
|
||||
})
|
||||
each("StringToBytes(", func(index int) {
|
||||
braceAt, hasEndBrace := skipUntilIfExistsOrLine(data, index, ')')
|
||||
if hasEndBrace {
|
||||
data[braceAt] = ' ' // Blank it
|
||||
}
|
||||
})
|
||||
each("w.Write(", func(index int) {
|
||||
braceAt, hasEndBrace := skipUntilIfExistsOrLine(data, index, ')')
|
||||
if hasEndBrace {
|
||||
data[braceAt] = ' ' // Blank it
|
||||
}
|
||||
})
|
||||
each("RelativeTime(", func(index int) {
|
||||
braceAt, _ := skipUntilIfExistsOrLine(data, index, 10)
|
||||
if data[braceAt-1] == ' ' {
|
||||
data[braceAt-1] = ' ' // Blank it
|
||||
}
|
||||
})
|
||||
each("if ", func(index int) {
|
||||
//fmt.Println("if index: ", index)
|
||||
braceAt, hasBrace := skipUntilIfExistsOrLine(data, index, '{')
|
||||
if hasBrace {
|
||||
if data[braceAt-1] != ' ' {
|
||||
panic("couldn't find space before brace, found ' " + string(data[braceAt-1]) + "' instead")
|
||||
}
|
||||
data[braceAt-1] = ')' // Drop a brace here to satisfy JS
|
||||
}
|
||||
})
|
||||
each("for _, item := range ", func(index int) {
|
||||
//fmt.Println("for index: ", index)
|
||||
braceAt, hasBrace := skipUntilIfExists(data, index, '{')
|
||||
if hasBrace {
|
||||
if data[braceAt-1] != ' ' {
|
||||
panic("couldn't find space before brace, found ' " + string(data[braceAt-1]) + "' instead")
|
||||
}
|
||||
data[braceAt-1] = ')' // Drop a brace here to satisfy JS
|
||||
}
|
||||
})
|
||||
rep("for _, item := range ", "for(item of ")
|
||||
rep("w.Write([]byte(", "o += ")
|
||||
rep("w.Write(StringToBytes(", "o += ")
|
||||
rep("w.Write(", "o += ")
|
||||
rep("+= c.", "+= ")
|
||||
rep("strconv.Itoa(", "")
|
||||
rep("strconv.FormatInt(", "")
|
||||
rep(" c.", "")
|
||||
rep("phrases.", "")
|
||||
rep(", 10;", "")
|
||||
/*var showPos = func(data []byte, index int) (out string) {
|
||||
out = "["
|
||||
for j, char := range data {
|
||||
if index == j {
|
||||
out += "[" + string(char) + "] "
|
||||
} else {
|
||||
out += string(char) + " "
|
||||
}
|
||||
}
|
||||
return out + "]"
|
||||
}*/
|
||||
|
||||
//rep("var plist = GetTmplPhrasesBytes("+shortName+"_tmpl_phrase_id)", "const plist = tmplPhrases[\""+tmplName+"\"];")
|
||||
//rep("//var plist = GetTmplPhrasesBytes("+shortName+"_tmpl_phrase_id)", "const "+shortName+"_phrase_arr = tmplPhrases[\""+tmplName+"\"];")
|
||||
rep("//var plist = GetTmplPhrasesBytes("+shortName+"_tmpl_phrase_id)", "const pl=tmplPhrases[\""+tmplName+"\"];")
|
||||
rep(shortName+"_phrase_arr", "pl")
|
||||
rep(shortName+"_phrase", "pl")
|
||||
rep("tmpl_"+shortName+"_vars", "t_v")
|
||||
|
||||
rep("var c_v_", "let c_v_")
|
||||
rep(`t_vars, ok := tmpl_i.`, `/*`)
|
||||
rep("[]byte(", "")
|
||||
rep("StringToBytes(", "")
|
||||
rep("RelativeTime(t_v.", "t_v.Relative")
|
||||
// TODO: Format dates properly on the client side
|
||||
rep(".Format(\"2006-01-02 15:04:05\"", "")
|
||||
rep(", 10", "")
|
||||
rep("if ", "if(")
|
||||
rep("return nil", "return o")
|
||||
rep(" )", ")")
|
||||
rep(" \n", "\n")
|
||||
rep("\n", ";\n")
|
||||
rep("{;", "{")
|
||||
rep("};", "}")
|
||||
rep("[;", "[")
|
||||
rep(",;", ",")
|
||||
rep("=;", "=")
|
||||
rep(`,
|
||||
});
|
||||
// ? Can we just use a regex? I'm thinking of going more efficient, or just outright rolling wasm, this is a temp hack in a place where performance doesn't particularly matter
|
||||
var each = func(phrase string, handle func(index int)) {
|
||||
//fmt.Println("find each '" + phrase + "'")
|
||||
var index = endBrace
|
||||
if index < 0 {
|
||||
panic("index under zero: " + strconv.Itoa(index))
|
||||
}
|
||||
var foundIt bool
|
||||
for {
|
||||
//fmt.Println("in index: ", index)
|
||||
//fmt.Println("pos: ", showPos(data, index))
|
||||
index, foundIt = skipAllUntilCharsExist(data, index, []byte(phrase))
|
||||
if !foundIt {
|
||||
break
|
||||
}
|
||||
handle(index)
|
||||
}
|
||||
}
|
||||
each("strconv.Itoa(", func(index int) {
|
||||
braceAt, hasEndBrace := skipUntilIfExists(data, index, ')')
|
||||
// TODO: Make sure we don't go onto the next line in case someone misplaced a brace
|
||||
if hasEndBrace {
|
||||
data[braceAt] = ' ' // Blank it
|
||||
}
|
||||
})
|
||||
each("w.Write([]byte(", func(index int) {
|
||||
braceAt, hasEndBrace := skipUntilIfExists(data, index, ')')
|
||||
// TODO: Make sure we don't go onto the next line in case someone misplaced a brace
|
||||
if hasEndBrace {
|
||||
data[braceAt] = ' ' // Blank it
|
||||
}
|
||||
braceAt, hasEndBrace = skipUntilIfExists(data, braceAt, ')')
|
||||
if hasEndBrace {
|
||||
data[braceAt] = ' ' // Blank this one too
|
||||
}
|
||||
})
|
||||
each("w.Write(", func(index int) {
|
||||
braceAt, hasEndBrace := skipUntilIfExists(data, index, ')')
|
||||
// TODO: Make sure we don't go onto the next line in case someone misplaced a brace
|
||||
if hasEndBrace {
|
||||
data[braceAt] = ' ' // Blank it
|
||||
}
|
||||
})
|
||||
each("if ", func(index int) {
|
||||
//fmt.Println("if index: ", index)
|
||||
braceAt, hasBrace := skipUntilIfExists(data, index, '{')
|
||||
if hasBrace {
|
||||
if data[braceAt-1] != ' ' {
|
||||
panic("couldn't find space before brace, found ' " + string(data[braceAt-1]) + "' instead")
|
||||
}
|
||||
data[braceAt-1] = ')' // Drop a brace here to satisfy JS
|
||||
}
|
||||
})
|
||||
each("for _, item := range ", func(index int) {
|
||||
//fmt.Println("for index: ", index)
|
||||
braceAt, hasBrace := skipUntilIfExists(data, index, '{')
|
||||
if hasBrace {
|
||||
if data[braceAt-1] != ' ' {
|
||||
panic("couldn't find space before brace, found ' " + string(data[braceAt-1]) + "' instead")
|
||||
}
|
||||
data[braceAt-1] = ')' // Drop a brace here to satisfy JS
|
||||
}
|
||||
})
|
||||
data = replace(data, "for _, item := range ", "for(item of ")
|
||||
data = replace(data, "w.Write([]byte(", "out += ")
|
||||
data = replace(data, "w.Write(", "out += ")
|
||||
data = replace(data, "strconv.Itoa(", "")
|
||||
data = replace(data, "strconv.FormatInt(", "")
|
||||
data = replace(data, "common.", "")
|
||||
data = replace(data, ", 10;", "")
|
||||
data = replace(data, shortName+"_tmpl_phrase_id = RegisterTmplPhraseNames([]string{", "[")
|
||||
data = replace(data, "var phrases = GetTmplPhrasesBytes("+shortName+"_tmpl_phrase_id)", "let phrases = tmplPhrases[\""+tmplName+"\"];")
|
||||
//data = replace(data, "var phrases = GetTmplPhrasesBytes("+shortName+"_tmpl_phrase_id)", "let phrases = tmplPhrases[\""+tmplName+"\"];\nconsole.log('tmplName:','"+tmplName+"')\nconsole.log('phrases:', phrases);")
|
||||
data = replace(data, "if ", "if(")
|
||||
data = replace(data, "return nil", "return out")
|
||||
data = replace(data, " )", ")")
|
||||
data = replace(data, " \n", "\n")
|
||||
data = replace(data, "\n", ";\n")
|
||||
data = replace(data, "{;", "{")
|
||||
data = replace(data, "};", "}")
|
||||
data = replace(data, "[;", "[")
|
||||
data = replace(data, ";;", ";")
|
||||
data = replace(data, ",;", ",")
|
||||
data = replace(data, "=;", "=")
|
||||
data = replace(data, `,
|
||||
});
|
||||
}`, "\n\t];")
|
||||
rep(`=
|
||||
}`, "=[]")
|
||||
rep("o += ", "o+=")
|
||||
rep(shortName+"_frags[", "fr[")
|
||||
rep("function Tmpl_"+shortName+"(t_v) {", "var Tmpl_"+shortName+"=(t_v)=>{")
|
||||
data = replace(data, `=
|
||||
}`, "= []")
|
||||
|
||||
fragset := tmpl.GetFrag(shortName)
|
||||
if fragset != nil {
|
||||
//sfrags := []byte("let " + shortName + "_frags=[\n")
|
||||
sfrags := []byte("{const fr=[")
|
||||
for i, frags := range fragset {
|
||||
//sfrags = append(sfrags, []byte(shortName+"_frags.push(`"+string(frags)+"`);\n")...)
|
||||
//sfrags = append(sfrags, []byte("`"+string(frags)+"`,\n")...)
|
||||
if i == 0 {
|
||||
sfrags = append(sfrags, []byte("`"+string(frags)+"`")...)
|
||||
} else {
|
||||
sfrags = append(sfrags, []byte(",`"+string(frags)+"`")...)
|
||||
}
|
||||
}
|
||||
//sfrags = append(sfrags, []byte("];\n")...)
|
||||
sfrags = append(sfrags, []byte("];")...)
|
||||
data = append(sfrags, data...)
|
||||
}
|
||||
rep("\n;", "\n")
|
||||
rep(";;", ";")
|
||||
fragset, ok := fragMap[shortName]
|
||||
if !ok {
|
||||
DebugLog("tmplName: ", tmplName)
|
||||
return errors.New("couldn't find template in fragmap")
|
||||
}
|
||||
|
||||
data = append(data, '}')
|
||||
for name, _ := range Themes {
|
||||
if strings.HasSuffix(shortName, "_"+name) {
|
||||
data = append(data, "var Tmpl_"+strings.TrimSuffix(shortName, "_"+name)+"=Tmpl_"+shortName+";"...)
|
||||
break
|
||||
}
|
||||
}
|
||||
var sfrags = []byte("let " + shortName + "_frags = [];\n")
|
||||
for _, frags := range fragset {
|
||||
sfrags = append(sfrags, []byte(shortName+"_frags.push(`"+string(frags)+"`);\n")...)
|
||||
}
|
||||
data = append(sfrags, data...)
|
||||
data = replace(data, "\n;", "\n")
|
||||
|
||||
path = tmplName + ".js"
|
||||
DebugLog("js path: ", path)
|
||||
ext := filepath.Ext("/tmpl_client/" + path)
|
||||
path = tmplName + ".js"
|
||||
DebugLog("js path: ", path)
|
||||
var ext = filepath.Ext("/tmpl_client/" + path)
|
||||
gzipData, err := compressBytesGzip(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
brData, err := CompressBytesBrotli(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Don't use Brotli if we get meagre gains from it as it takes longer to process the responses
|
||||
if len(brData) >= (len(data) + 110) {
|
||||
brData = nil
|
||||
} else {
|
||||
diff := len(data) - len(brData)
|
||||
if diff <= len(data)/100 {
|
||||
brData = nil
|
||||
}
|
||||
}
|
||||
list.Set("/static/"+path, SFile{data, gzipData, 0, int64(len(data)), int64(len(gzipData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})
|
||||
|
||||
gzipData, err := CompressBytesGzip(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Don't use Gzip if we get meagre gains from it as it takes longer to process the responses
|
||||
if len(gzipData) >= (len(data) + 120) {
|
||||
gzipData = nil
|
||||
} else {
|
||||
diff := len(data) - len(gzipData)
|
||||
if diff <= len(data)/100 {
|
||||
gzipData = nil
|
||||
}
|
||||
}
|
||||
|
||||
// Get a checksum for CSPs and cache busting
hasher := sha256.New()
hasher.Write(data)
sum := hasher.Sum(nil)
checksum := hex.EncodeToString(sum)
integrity := base64.StdEncoding.EncodeToString(sum)

l.Set(l.Prefix+path, &SFile{data, gzipData, brData, checksum, integrity, l.Prefix + path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})

DebugLogf("Added the '%s' static file.", path)
return nil
})
DebugLogf("Added the '%s' static file.", path)
return nil
})
}
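
The hashing step above produces a hex digest for the `?h=` cache-busting query string and a base64 digest for CSP / Subresource Integrity values. A minimal standalone sketch of that step, assuming the same SHA-256 convention; the `sha256-` prefix in the output is added here only to show how an SRI attribute would consume the value:

```go
package main

import (
	"crypto/sha256"
	"encoding/base64"
	"encoding/hex"
	"fmt"
)

// hashAsset returns a hex checksum (for ?h= cache busting) and a base64
// digest (for CSP / Subresource Integrity) of the same SHA-256 sum.
func hashAsset(data []byte) (checksum, integrity string) {
	h := sha256.New()
	h.Write(data)
	sum := h.Sum(nil)
	return hex.EncodeToString(sum), base64.StdEncoding.EncodeToString(sum)
}

func main() {
	checksum, integrity := hashAsset([]byte("alert('hi')"))
	fmt.Println("/s/global.js?h=" + checksum)    // hypothetical cache-busted URL
	fmt.Println("integrity=sha256-" + integrity) // how an SRI attribute would use it
}
```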
|
||||
|
||||
func (l SFileList) Init() error {
|
||||
return filepath.Walk("./public", func(path string, f os.FileInfo, err error) error {
|
||||
if f.IsDir() {
|
||||
return nil
|
||||
}
|
||||
func (list SFileList) Init() error {
|
||||
return filepath.Walk("./public", func(path string, f os.FileInfo, err error) error {
|
||||
if f.IsDir() {
|
||||
return nil
|
||||
}
|
||||
|
||||
path = strings.Replace(path, "\\", "/", -1)
|
||||
data, err := ioutil.ReadFile(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
path = strings.TrimPrefix(path, "public/")
|
||||
ext := filepath.Ext("/public/" + path)
|
||||
if ext == ".js" {
|
||||
data = bytes.Replace(data, []byte("\r"), []byte(""), -1)
|
||||
}
|
||||
mimetype := mime.TypeByExtension(ext)
|
||||
path = strings.Replace(path, "\\", "/", -1)
|
||||
data, err := ioutil.ReadFile(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Get a checksum for CSPs and cache busting
|
||||
hasher := sha256.New()
|
||||
hasher.Write(data)
|
||||
sum := hasher.Sum(nil)
|
||||
checksum := hex.EncodeToString(sum)
|
||||
integrity := base64.StdEncoding.EncodeToString(sum)
|
||||
path = strings.TrimPrefix(path, "public/")
|
||||
var ext = filepath.Ext("/public/" + path)
|
||||
mimetype := mime.TypeByExtension(ext)
|
||||
|
||||
// Avoid double-compressing images
|
||||
var gzipData, brData []byte
|
||||
if mimetype != "image/jpeg" && mimetype != "image/png" && mimetype != "image/gif" {
|
||||
brData, err = CompressBytesBrotli(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Don't use Brotli if we get meagre gains from it as it takes longer to process the responses
|
||||
if len(brData) >= (len(data) + 130) {
|
||||
brData = nil
|
||||
} else {
|
||||
diff := len(data) - len(brData)
|
||||
if diff <= len(data)/100 {
|
||||
brData = nil
|
||||
}
|
||||
}
|
||||
// Avoid double-compressing images
|
||||
var gzipData []byte
|
||||
if mimetype != "image/jpeg" && mimetype != "image/png" && mimetype != "image/gif" {
|
||||
gzipData, err = compressBytesGzip(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Don't use Gzip if we get meagre gains from it as it takes longer to process the responses
|
||||
if len(gzipData) >= (len(data) + 100) {
|
||||
gzipData = nil
|
||||
} else {
|
||||
diff := len(data) - len(gzipData)
|
||||
if diff <= len(data)/100 {
|
||||
gzipData = nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
gzipData, err = CompressBytesGzip(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Don't use Gzip if we get meagre gains from it as it takes longer to process the responses
|
||||
if len(gzipData) >= (len(data) + 150) {
|
||||
gzipData = nil
|
||||
} else {
|
||||
diff := len(data) - len(gzipData)
|
||||
if diff <= len(data)/100 {
|
||||
gzipData = nil
|
||||
}
|
||||
}
|
||||
}
|
||||
list.Set("/static/"+path, SFile{data, gzipData, 0, int64(len(data)), int64(len(gzipData)), mimetype, f, f.ModTime().UTC().Format(http.TimeFormat)})
|
||||
|
||||
l.Set(l.Prefix+path, &SFile{data, gzipData, brData, checksum, integrity, l.Prefix + path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mimetype, f, f.ModTime().UTC().Format(http.TimeFormat)})
|
||||
|
||||
DebugLogf("Added the '%s' static file.", path)
|
||||
return nil
|
||||
})
|
||||
DebugLogf("Added the '%s' static file.", path)
|
||||
return nil
|
||||
})
|
||||
}
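
Both branches of this walk apply the same "meagre gains" rule before keeping a pre-compressed copy: the compressed bytes must beat the original by a fixed byte margin and by more than roughly 1% of the original size. A small sketch of that rule as a standalone helper, with the margin as a parameter since the call sites above use different values (100–150 bytes):

```go
package main

import "fmt"

// worthKeeping mirrors the heuristic above: discard the compressed copy if it
// isn't smaller than the original by at least `margin` bytes, or if the
// saving is no more than ~1% of the original size.
func worthKeeping(original, compressed []byte, margin int) bool {
	if len(compressed) >= len(original)+margin {
		return false
	}
	diff := len(original) - len(compressed)
	return diff > len(original)/100
}

func main() {
	orig := make([]byte, 10000)
	comp := make([]byte, 9950)                 // only 0.5% smaller
	fmt.Println(worthKeeping(orig, comp, 120)) // false: gains are meagre
}
```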
|
||||
|
||||
func (l SFileList) Add(path, prefix string) error {
|
||||
data, err := ioutil.ReadFile(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
fi, err := os.Open(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
f, err := fi.Stat()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
func (list SFileList) Add(path string, prefix string) error {
|
||||
data, err := ioutil.ReadFile(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
fi, err := os.Open(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
f, err := fi.Stat()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ext := filepath.Ext(path)
|
||||
path = strings.TrimPrefix(path, prefix)
|
||||
var ext = filepath.Ext(path)
|
||||
path = strings.TrimPrefix(path, prefix)
|
||||
gzipData, err := compressBytesGzip(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
brData, err := CompressBytesBrotli(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Don't use Brotli if we get meagre gains from it as it takes longer to process the responses
|
||||
if len(brData) >= (len(data) + 130) {
|
||||
brData = nil
|
||||
} else {
|
||||
diff := len(data) - len(brData)
|
||||
if diff <= len(data)/100 {
|
||||
brData = nil
|
||||
}
|
||||
}
|
||||
list.Set("/static"+path, SFile{data, gzipData, 0, int64(len(data)), int64(len(gzipData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})
|
||||
|
||||
gzipData, err := CompressBytesGzip(data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Don't use Gzip if we get meagre gains from it as it takes longer to process the responses
|
||||
if len(gzipData) >= (len(data) + 150) {
|
||||
gzipData = nil
|
||||
} else {
|
||||
diff := len(data) - len(gzipData)
|
||||
if diff <= len(data)/100 {
|
||||
gzipData = nil
|
||||
}
|
||||
}
|
||||
|
||||
// Get a checksum for CSPs and cache busting
|
||||
hasher := sha256.New()
|
||||
hasher.Write(data)
|
||||
sum := hasher.Sum(nil)
|
||||
checksum := hex.EncodeToString(sum)
|
||||
integrity := base64.StdEncoding.EncodeToString(sum)
|
||||
|
||||
l.Set(l.Prefix+path, &SFile{data, gzipData, brData, checksum, integrity, l.Prefix + path + "?h=" + checksum, 0, int64(len(data)), strconv.Itoa(len(data)), int64(len(gzipData)), strconv.Itoa(len(gzipData)), int64(len(brData)), strconv.Itoa(len(brData)), mime.TypeByExtension(ext), f, f.ModTime().UTC().Format(http.TimeFormat)})
|
||||
|
||||
DebugLogf("Added the '%s' static file", path)
|
||||
return nil
|
||||
DebugLogf("Added the '%s' static file", path)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (l SFileList) Get(path string) (file *SFile, exists bool) {
staticFileMutex.RLock()
defer staticFileMutex.RUnlock()
file, exists = l.Long[path]
return file, exists
func (list SFileList) Get(name string) (file SFile, exists bool) {
staticFileMutex.RLock()
defer staticFileMutex.RUnlock()
file, exists = list[name]
return file, exists
}

// fetch without /s/ to avoid allocing in pages.go
func (l SFileList) GetShort(name string) (file *SFile, exists bool) {
staticFileMutex.RLock()
defer staticFileMutex.RUnlock()
file, exists = l.Short[name]
return file, exists
func (list SFileList) Set(name string, data SFile) {
staticFileMutex.Lock()
defer staticFileMutex.Unlock()
list[name] = data
}

func (l SFileList) Set(name string, data *SFile) {
staticFileMutex.Lock()
defer staticFileMutex.Unlock()
// TODO: Propagate errors back up
uurl, err := url.Parse(name)
if err != nil {
return
}
l.Long[uurl.Path] = data
l.Short[strings.TrimPrefix(strings.TrimPrefix(name, l.Prefix), "/")] = data
}
|
||||
|
||||
var gzipBestCompress sync.Pool

func CompressBytesGzip(in []byte) (b []byte, err error) {
var buf bytes.Buffer
ii := gzipBestCompress.Get()
var gz *gzip.Writer
if ii == nil {
gz, err = gzip.NewWriterLevel(&buf, gzip.BestCompression)
if err != nil {
return nil, err
}
} else {
gz = ii.(*gzip.Writer)
gz.Reset(&buf)
}
_, err = gz.Write(in)
if err != nil {
return nil, err
}
err = gz.Close()
if err != nil {
return nil, err
}
gzipBestCompress.Put(gz)
return buf.Bytes(), nil
}

func CompressBytesBrotli(in []byte) ([]byte, error) {
var buff bytes.Buffer
br := brotli.NewWriterLevel(&buff, brotli.BestCompression)
_, err := br.Write(in)
if err != nil {
return nil, err
}
err = br.Close()
if err != nil {
return nil, err
}
return buff.Bytes(), nil
func compressBytesGzip(in []byte) ([]byte, error) {
var buff bytes.Buffer
gz, err := gzip.NewWriterLevel(&buff, gzip.BestCompression)
if err != nil {
return nil, err
}
_, err = gz.Write(in)
if err != nil {
return nil, err
}
err = gz.Close()
if err != nil {
return nil, err
}
return buff.Bytes(), nil
}
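
CompressBytesGzip above reuses best-compression gzip writers through a sync.Pool rather than constructing one per file, since gzip writer setup at BestCompression is relatively expensive. A self-contained sketch of that pooled-writer pattern using only the standard library (the Brotli path is omitted here because it relies on a third-party writer):

```go
package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"sync"
)

// A pooled best-compression gzip writer: allocate on first use, then Reset
// and reuse it across calls, as CompressBytesGzip does above.
var gzPool sync.Pool

func gzipBytes(in []byte) ([]byte, error) {
	var buf bytes.Buffer
	var gz *gzip.Writer
	if v := gzPool.Get(); v == nil {
		var err error
		gz, err = gzip.NewWriterLevel(&buf, gzip.BestCompression)
		if err != nil {
			return nil, err
		}
	} else {
		gz = v.(*gzip.Writer)
		gz.Reset(&buf)
	}
	if _, err := gz.Write(in); err != nil {
		return nil, err
	}
	if err := gz.Close(); err != nil {
		return nil, err
	}
	gzPool.Put(gz)
	return buf.Bytes(), nil
}

func main() {
	out, err := gzipBytes(bytes.Repeat([]byte("gosora "), 500))
	fmt.Println(len(out), err)
}
```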
|
||||
|
206	common/forum.go
@ -1,131 +1,128 @@
|
||||
package common

//import "fmt"
import (
//"log"
"database/sql"
"errors"
"strconv"
"strings"

qgen "git.tuxpa.in/a/gosora/query_gen"
"../query_gen/lib"
_ "github.com/go-sql-driver/mysql"
)
|
||||
|
||||
// TODO: Do we really need this?
|
||||
type ForumAdmin struct {
|
||||
ID int
|
||||
Name string
|
||||
Desc string
|
||||
Active bool
|
||||
Preset string
|
||||
TopicCount int
|
||||
PresetLang string
|
||||
ID int
|
||||
Name string
|
||||
Desc string
|
||||
Active bool
|
||||
Preset string
|
||||
TopicCount int
|
||||
PresetLang string
|
||||
}
|
||||
|
||||
type Forum struct {
|
||||
ID int
|
||||
Link string
|
||||
Name string
|
||||
Desc string
|
||||
Tmpl string
|
||||
Active bool
|
||||
Order int
|
||||
Preset string
|
||||
ParentID int
|
||||
ParentType string
|
||||
TopicCount int
|
||||
ID int
|
||||
Link string
|
||||
Name string
|
||||
Desc string
|
||||
Active bool
|
||||
Preset string
|
||||
ParentID int
|
||||
ParentType string
|
||||
TopicCount int
|
||||
|
||||
LastTopic *Topic
|
||||
LastTopicID int
|
||||
LastReplyer *User
|
||||
LastReplyerID int
|
||||
LastTopicTime string // So that we can re-calculate the relative time on the spot in /forums/
|
||||
LastPage int
|
||||
LastTopic *Topic
|
||||
LastTopicID int
|
||||
LastReplyer *User
|
||||
LastReplyerID int
|
||||
LastTopicTime string // So that we can re-calculate the relative time on the spot in /forums/
|
||||
}
|
||||
|
||||
// ? - What is this for?
|
||||
type ForumSimple struct {
|
||||
ID int
|
||||
Name string
|
||||
Active bool
|
||||
Preset string
|
||||
ID int
|
||||
Name string
|
||||
Active bool
|
||||
Preset string
|
||||
}
|
||||
|
||||
type ForumStmts struct {
|
||||
update *sql.Stmt
|
||||
setPreset *sql.Stmt
|
||||
update *sql.Stmt
|
||||
setPreset *sql.Stmt
|
||||
}
|
||||
|
||||
var forumStmts ForumStmts
|
||||
|
||||
func init() {
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
forumStmts = ForumStmts{
|
||||
update: acc.Update("forums").Set("name=?,desc=?,active=?,preset=?").Where("fid=?").Prepare(),
|
||||
setPreset: acc.Update("forums").Set("preset=?").Where("fid=?").Prepare(),
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
forumStmts = ForumStmts{
|
||||
update: acc.Update("forums").Set("name = ?, desc = ?, active = ?, preset = ?").Where("fid = ?").Prepare(),
|
||||
setPreset: acc.Update("forums").Set("preset = ?").Where("fid = ?").Prepare(),
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
}
|
||||
|
||||
// Copy gives you a non-pointer concurrency safe copy of the forum
|
||||
func (f *Forum) Copy() (fcopy Forum) {
|
||||
fcopy = *f
|
||||
return fcopy
|
||||
func (forum *Forum) Copy() (fcopy Forum) {
|
||||
fcopy = *forum
|
||||
return fcopy
|
||||
}
|
||||
|
||||
// TODO: Write tests for this
|
||||
func (f *Forum) Update(name, desc string, active bool, preset string) error {
|
||||
if name == "" {
|
||||
name = f.Name
|
||||
}
|
||||
// TODO: Do a line sanitise? Does it matter?
|
||||
preset = strings.TrimSpace(preset)
|
||||
_, err := forumStmts.update.Exec(name, desc, active, preset, f.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if f.Preset != preset && preset != "custom" && preset != "" {
|
||||
err = PermmapToQuery(PresetToPermmap(preset), f.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
_ = Forums.Reload(f.ID)
|
||||
return nil
|
||||
func (forum *Forum) Update(name string, desc string, active bool, preset string) error {
|
||||
if name == "" {
|
||||
name = forum.Name
|
||||
}
|
||||
// TODO: Do a line sanitise? Does it matter?
|
||||
preset = strings.TrimSpace(preset)
|
||||
_, err := forumStmts.update.Exec(name, desc, active, preset, forum.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if forum.Preset != preset || preset == "custom" || preset == "" {
|
||||
err = PermmapToQuery(PresetToPermmap(preset), forum.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
_ = Forums.Reload(forum.ID)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f *Forum) SetPreset(preset string, gid int) error {
|
||||
fp, changed := GroupForumPresetToForumPerms(preset)
|
||||
if changed {
|
||||
return f.SetPerms(fp, preset, gid)
|
||||
}
|
||||
return nil
|
||||
func (forum *Forum) SetPreset(preset string, gid int) error {
|
||||
fperms, changed := GroupForumPresetToForumPerms(preset)
|
||||
if changed {
|
||||
return forum.SetPerms(fperms, preset, gid)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// TODO: Refactor this
|
||||
func (f *Forum) SetPerms(fperms *ForumPerms, preset string, gid int) (err error) {
|
||||
err = ReplaceForumPermsForGroup(gid, map[int]string{f.ID: preset}, map[int]*ForumPerms{f.ID: fperms})
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
return errors.New("Unable to update the permissions")
|
||||
}
|
||||
func (forum *Forum) SetPerms(fperms *ForumPerms, preset string, gid int) (err error) {
|
||||
err = ReplaceForumPermsForGroup(gid, map[int]string{forum.ID: preset}, map[int]*ForumPerms{forum.ID: fperms})
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
return errors.New("Unable to update the permissions")
|
||||
}
|
||||
|
||||
// TODO: Add this and replaceForumPermsForGroup into a transaction?
|
||||
_, err = forumStmts.setPreset.Exec("", f.ID)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
return errors.New("Unable to update the forum")
|
||||
}
|
||||
err = Forums.Reload(f.ID)
|
||||
if err != nil {
|
||||
return errors.New("Unable to reload forum")
|
||||
}
|
||||
err = FPStore.Reload(f.ID)
|
||||
if err != nil {
|
||||
return errors.New("Unable to reload the forum permissions")
|
||||
}
|
||||
return nil
|
||||
// TODO: Add this and replaceForumPermsForGroup into a transaction?
|
||||
_, err = forumStmts.setPreset.Exec("", forum.ID)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
return errors.New("Unable to update the forum")
|
||||
}
|
||||
err = Forums.Reload(forum.ID)
|
||||
if err != nil {
|
||||
return errors.New("Unable to reload forum")
|
||||
}
|
||||
err = FPStore.Reload(forum.ID)
|
||||
if err != nil {
|
||||
return errors.New("Unable to reload the forum permissions")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// TODO: Replace this sorting mechanism with something a lot more efficient
|
||||
@ -133,42 +130,27 @@ func (f *Forum) SetPerms(fperms *ForumPerms, preset string, gid int) (err error)
|
||||
type SortForum []*Forum
|
||||
|
||||
func (sf SortForum) Len() int {
|
||||
return len(sf)
|
||||
return len(sf)
|
||||
}
|
||||
func (sf SortForum) Swap(i, j int) {
|
||||
sf[i], sf[j] = sf[j], sf[i]
|
||||
sf[i], sf[j] = sf[j], sf[i]
|
||||
}
|
||||
|
||||
/*func (sf SortForum) Less(i,j int) bool {
|
||||
l := sf.less(i,j)
|
||||
if l {
|
||||
log.Printf("%s is less than %s. order: %d. id: %d.",sf[i].Name, sf[j].Name, sf[i].Order, sf[i].ID)
|
||||
} else {
|
||||
log.Printf("%s is not less than %s. order: %d. id: %d.",sf[i].Name, sf[j].Name, sf[i].Order, sf[i].ID)
|
||||
}
|
||||
return l
|
||||
}*/
|
||||
func (sf SortForum) Less(i, j int) bool {
|
||||
if sf[i].Order < sf[j].Order {
|
||||
return true
|
||||
} else if sf[i].Order == sf[j].Order {
|
||||
return sf[i].ID < sf[j].ID
|
||||
}
|
||||
return false
|
||||
return sf[i].ID < sf[j].ID
|
||||
}
|
||||
|
||||
// ! Don't use this outside of tests and possibly template_init.go
|
||||
func BlankForum(fid int, link, name, desc string, active bool, preset string, parentID int, parentType string, topicCount int) *Forum {
|
||||
return &Forum{ID: fid, Link: link, Name: name, Desc: desc, Active: active, Preset: preset, ParentID: parentID, ParentType: parentType, TopicCount: topicCount}
|
||||
func BlankForum(fid int, link string, name string, desc string, active bool, preset string, parentID int, parentType string, topicCount int) *Forum {
|
||||
return &Forum{ID: fid, Link: link, Name: name, Desc: desc, Active: active, Preset: preset, ParentID: parentID, ParentType: parentType, TopicCount: topicCount}
|
||||
}
|
||||
|
||||
func BuildForumURL(slug string, fid int) string {
if slug == "" || !Config.BuildSlugs {
return "/forum/" + strconv.Itoa(fid)
}
return "/forum/" + slug + "." + strconv.Itoa(fid)
if slug == "" || !Config.BuildSlugs {
return "/forum/" + strconv.Itoa(fid)
}
return "/forum/" + slug + "." + strconv.Itoa(fid)
}

func GetForumURLPrefix() string {
return "/forum/"
return "/forum/"
}
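
BuildForumURL falls back to a bare numeric URL when no slug is available or slug building is switched off. A runnable sketch of that behaviour; `buildSlugs` stands in for the Config.BuildSlugs setting and is an assumption of this sketch:

```go
package main

import (
	"fmt"
	"strconv"
)

// buildForumURL mimics BuildForumURL above: use the slug form only when a
// slug exists and slug building is enabled.
func buildForumURL(slug string, fid int, buildSlugs bool) string {
	if slug == "" || !buildSlugs {
		return "/forum/" + strconv.Itoa(fid)
	}
	return "/forum/" + slug + "." + strconv.Itoa(fid)
}

func main() {
	fmt.Println(buildForumURL("general-chat", 2, true)) // /forum/general-chat.2
	fmt.Println(buildForumURL("", 2, true))             // /forum/2
}
```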
|
||||
|
@ -1,399 +0,0 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"strconv"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
)
|
||||
|
||||
var ForumActionStore ForumActionStoreInt
|
||||
|
||||
//var ForumActionRunnableStore ForumActionRunnableStoreInt
|
||||
|
||||
const (
|
||||
ForumActionDelete = iota
|
||||
ForumActionLock
|
||||
ForumActionUnlock
|
||||
ForumActionMove
|
||||
)
|
||||
|
||||
func ConvStringToAct(s string) int {
switch s {
case "delete":
return ForumActionDelete
case "lock":
return ForumActionLock
case "unlock":
return ForumActionUnlock
case "move":
return ForumActionMove
}
return -1
}
func ConvActToString(a int) string {
switch a {
case ForumActionDelete:
return "delete"
case ForumActionLock:
return "lock"
case ForumActionUnlock:
return "unlock"
case ForumActionMove:
return "move"
}
return ""
}
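
ConvStringToAct and ConvActToString are intended to be inverses for the four known actions, with -1 and "" as the fallbacks for unknown input. A quick self-contained round-trip check; the constants and converters are re-declared here (in lower-case form) so the sketch compiles on its own:

```go
package main

import "fmt"

const (
	forumActionDelete = iota
	forumActionLock
	forumActionUnlock
	forumActionMove
)

// convStringToAct / convActToString mirror the converters above.
func convStringToAct(s string) int {
	switch s {
	case "delete":
		return forumActionDelete
	case "lock":
		return forumActionLock
	case "unlock":
		return forumActionUnlock
	case "move":
		return forumActionMove
	}
	return -1
}

func convActToString(a int) string {
	switch a {
	case forumActionDelete:
		return "delete"
	case forumActionLock:
		return "lock"
	case forumActionUnlock:
		return "unlock"
	case forumActionMove:
		return "move"
	}
	return ""
}

func main() {
	// Every known name should survive the round trip; "bogus" should not.
	for _, name := range []string{"delete", "lock", "unlock", "move", "bogus"} {
		act := convStringToAct(name)
		fmt.Printf("%q -> %d -> %q\n", name, act, convActToString(act))
	}
}
```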
|
||||
|
||||
var forumActionStmts ForumActionStmts
|
||||
|
||||
type ForumActionStmts struct {
|
||||
get1 *sql.Stmt
|
||||
get2 *sql.Stmt
|
||||
lock1 *sql.Stmt
|
||||
lock2 *sql.Stmt
|
||||
unlock1 *sql.Stmt
|
||||
unlock2 *sql.Stmt
|
||||
}
|
||||
|
||||
type ForumAction struct {
|
||||
ID int
|
||||
Forum int
|
||||
RunOnTopicCreation bool
|
||||
RunDaysAfterTopicCreation int
|
||||
RunDaysAfterTopicLastReply int
|
||||
Action int
|
||||
Extra string
|
||||
}
|
||||
|
||||
func init() {
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
t := "topics"
|
||||
forumActionStmts = ForumActionStmts{
|
||||
get1: acc.Select(t).Cols("tid,createdBy,poll").Where("parentID=?").DateOlderThanQ("createdAt", "day").Stmt(),
|
||||
get2: acc.Select(t).Cols("tid,createdBy,poll").Where("parentID=?").DateOlderThanQ("lastReplyAt", "day").Stmt(),
|
||||
|
||||
/*lock1: acc.Update(t).Set("is_closed=1").Where("parentID=?").DateOlderThanQ("createdAt", "day").Stmt(),
|
||||
lock2: acc.Update(t).Set("is_closed=1").Where("parentID=?").DateOlderThanQ("lastReplyAt", "day").Stmt(),
|
||||
unlock1: acc.Update(t).Set("is_closed=0").Where("parentID=?").DateOlderThanQ("createdAt", "day").Stmt(),
|
||||
unlock2: acc.Update(t).Set("is_closed=0").Where("parentID=?").DateOlderThanQ("lastReplyAt", "day").Stmt(),*/
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
}
|
||||
|
||||
func (a *ForumAction) Run() error {
|
||||
if a.RunDaysAfterTopicCreation > 0 {
|
||||
if e := a.runDaysAfterTopicCreation(); e != nil {
|
||||
return e
|
||||
}
|
||||
}
|
||||
if a.RunDaysAfterTopicLastReply > 0 {
|
||||
if e := a.runDaysAfterTopicLastReply(); e != nil {
|
||||
return e
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *ForumAction) runQ(stmt *sql.Stmt, days int, f func(t *Topic) error) error {
|
||||
rows, e := stmt.Query(days, a.Forum)
|
||||
if e != nil {
|
||||
return e
|
||||
}
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
// TODO: Decouple this
|
||||
t := &Topic{ParentID: a.Forum}
|
||||
if e := rows.Scan(&t.ID, &t.CreatedBy, &t.Poll); e != nil {
|
||||
return e
|
||||
}
|
||||
if e = f(t); e != nil {
|
||||
return e
|
||||
}
|
||||
}
|
||||
return rows.Err()
|
||||
}
|
||||
|
||||
func (a *ForumAction) runDaysAfterTopicCreation() (e error) {
|
||||
switch a.Action {
|
||||
case ForumActionDelete:
|
||||
// TODO: Bulk delete?
|
||||
e = a.runQ(forumActionStmts.get1, a.RunDaysAfterTopicCreation, func(t *Topic) error {
|
||||
return t.Delete()
|
||||
})
|
||||
case ForumActionLock:
|
||||
/*_, e := forumActionStmts.lock1.Exec(a.Forum)
|
||||
if e != nil {
|
||||
return e
|
||||
}*/
|
||||
// TODO: Bulk lock? Lock and get resultset of changed topics somehow?
|
||||
fmt.Println("ForumActionLock")
|
||||
e = a.runQ(forumActionStmts.get1, a.RunDaysAfterTopicCreation, func(t *Topic) error {
|
||||
fmt.Printf("t: %+v\n", t)
|
||||
return t.Lock()
|
||||
})
|
||||
case ForumActionUnlock:
|
||||
// TODO: Bulk unlock? Unlock and get resultset of changed topics somehow?
|
||||
e = a.runQ(forumActionStmts.get1, a.RunDaysAfterTopicCreation, func(t *Topic) error {
|
||||
return t.Unlock()
|
||||
})
|
||||
case ForumActionMove:
|
||||
destForum, e := strconv.Atoi(a.Extra)
|
||||
if e != nil {
|
||||
return e
|
||||
}
|
||||
e = a.runQ(forumActionStmts.get1, a.RunDaysAfterTopicCreation, func(t *Topic) error {
|
||||
return t.MoveTo(destForum)
|
||||
})
|
||||
}
|
||||
return e
|
||||
}
|
||||
|
||||
func (a *ForumAction) runDaysAfterTopicLastReply() (e error) {
|
||||
switch a.Action {
|
||||
case ForumActionDelete:
|
||||
e = a.runQ(forumActionStmts.get2, a.RunDaysAfterTopicLastReply, func(t *Topic) error {
|
||||
return t.Delete()
|
||||
})
|
||||
case ForumActionLock:
|
||||
// TODO: Bulk lock? Lock and get resultset of changed topics somehow?
|
||||
e = a.runQ(forumActionStmts.get2, a.RunDaysAfterTopicLastReply, func(t *Topic) error {
|
||||
return t.Lock()
|
||||
})
|
||||
case ForumActionUnlock:
|
||||
// TODO: Bulk unlock? Unlock and get resultset of changed topics somehow?
|
||||
e = a.runQ(forumActionStmts.get2, a.RunDaysAfterTopicLastReply, func(t *Topic) error {
|
||||
return t.Unlock()
|
||||
})
|
||||
case ForumActionMove:
|
||||
destForum, e := strconv.Atoi(a.Extra)
|
||||
if e != nil {
|
||||
return e
|
||||
}
|
||||
e = a.runQ(forumActionStmts.get2, a.RunDaysAfterTopicLastReply, func(t *Topic) error {
|
||||
return t.MoveTo(destForum)
|
||||
})
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *ForumAction) TopicCreation(tid int) error {
|
||||
if !a.RunOnTopicCreation {
|
||||
return nil
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type ForumActionStoreInt interface {
|
||||
Get(faid int) (*ForumAction, error)
|
||||
GetInForum(fid int) ([]*ForumAction, error)
|
||||
GetAll() ([]*ForumAction, error)
|
||||
GetNewTopicActions(fid int) ([]*ForumAction, error)
|
||||
|
||||
Add(fa *ForumAction) (int, error)
|
||||
Delete(faid int) error
|
||||
Exists(faid int) bool
|
||||
Count() int
|
||||
CountInForum(fid int) int
|
||||
|
||||
DailyTick() error
|
||||
}
|
||||
|
||||
type DefaultForumActionStore struct {
|
||||
get *sql.Stmt
|
||||
getInForum *sql.Stmt
|
||||
getAll *sql.Stmt
|
||||
getNewTopicActions *sql.Stmt
|
||||
|
||||
add *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
exists *sql.Stmt
|
||||
count *sql.Stmt
|
||||
countInForum *sql.Stmt
|
||||
}
|
||||
|
||||
func NewDefaultForumActionStore(acc *qgen.Accumulator) (*DefaultForumActionStore, error) {
|
||||
fa := "forums_actions"
|
||||
allCols := "faid,fid,runOnTopicCreation,runDaysAfterTopicCreation,runDaysAfterTopicLastReply,action,extra"
|
||||
return &DefaultForumActionStore{
|
||||
get: acc.Select(fa).Columns("fid,runOnTopicCreation,runDaysAfterTopicCreation,runDaysAfterTopicLastReply,action,extra").Where("faid=?").Prepare(),
|
||||
getInForum: acc.Select(fa).Columns("faid,runOnTopicCreation,runDaysAfterTopicCreation,runDaysAfterTopicLastReply,action,extra").Where("fid=?").Prepare(),
|
||||
getAll: acc.Select(fa).Columns(allCols).Prepare(),
|
||||
getNewTopicActions: acc.Select(fa).Columns(allCols).Where("fid=? AND runOnTopicCreation=1").Prepare(),
|
||||
|
||||
add: acc.Insert(fa).Columns("fid,runOnTopicCreation,runDaysAfterTopicCreation,runDaysAfterTopicLastReply,action,extra").Fields("?,?,?,?,?,?").Prepare(),
|
||||
delete: acc.Delete(fa).Where("faid=?").Prepare(),
|
||||
exists: acc.Exists(fa, "faid").Prepare(),
|
||||
count: acc.Count(fa).Prepare(),
|
||||
countInForum: acc.Count(fa).Where("fid=?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
func (s *DefaultForumActionStore) DailyTick() error {
fas, e := s.GetAll()
if e != nil {
return e
}
for _, fa := range fas {
if e := fa.Run(); e != nil {
return e
}
}
return nil
}
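
DailyTick loads every ForumAction and runs it, returning on the first error. A sketch of how such a tick might be driven from a daily timer; the ticker wiring and error handling here are assumptions for illustration, not Gosora's actual scheduler:

```go
package main

import (
	"fmt"
	"time"
)

// tickable is a stand-in for the store's DailyTick method above.
type tickable interface {
	DailyTick() error
}

// dailyTicker runs DailyTick once per day until stop is closed, logging
// failures rather than aborting the loop.
func dailyTicker(s tickable, stop <-chan struct{}) {
	t := time.NewTicker(24 * time.Hour)
	defer t.Stop()
	for {
		select {
		case <-t.C:
			if err := s.DailyTick(); err != nil {
				fmt.Println("forum action tick failed:", err)
			}
		case <-stop:
			return
		}
	}
}

type fakeStore struct{}

func (fakeStore) DailyTick() error { return nil }

func main() {
	stop := make(chan struct{})
	go dailyTicker(fakeStore{}, stop)
	close(stop)
}
```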
|
||||
|
||||
func (s *DefaultForumActionStore) Get(id int) (*ForumAction, error) {
|
||||
fa := ForumAction{ID: id}
|
||||
var str string
|
||||
e := s.get.QueryRow(id).Scan(&fa.Forum, &fa.RunOnTopicCreation, &fa.RunDaysAfterTopicCreation, &fa.RunDaysAfterTopicLastReply, &str, &fa.Extra)
|
||||
fa.Action = ConvStringToAct(str)
|
||||
return &fa, e
|
||||
}
|
||||
|
||||
func (s *DefaultForumActionStore) GetInForum(fid int) (fas []*ForumAction, e error) {
|
||||
rows, e := s.getInForum.Query(fid)
|
||||
if e != nil {
|
||||
return nil, e
|
||||
}
|
||||
defer rows.Close()
|
||||
var str string
|
||||
for rows.Next() {
|
||||
fa := ForumAction{Forum: fid}
|
||||
if e := rows.Scan(&fa.ID, &fa.RunOnTopicCreation, &fa.RunDaysAfterTopicCreation, &fa.RunDaysAfterTopicLastReply, &str, &fa.Extra); e != nil {
|
||||
return nil, e
|
||||
}
|
||||
fa.Action = ConvStringToAct(str)
|
||||
fas = append(fas, &fa)
|
||||
}
|
||||
return fas, rows.Err()
|
||||
}
|
||||
|
||||
func (s *DefaultForumActionStore) GetAll() (fas []*ForumAction, e error) {
|
||||
rows, e := s.getAll.Query()
|
||||
if e != nil {
|
||||
return nil, e
|
||||
}
|
||||
defer rows.Close()
|
||||
var str string
|
||||
for rows.Next() {
|
||||
fa := ForumAction{}
|
||||
if e := rows.Scan(&fa.ID, &fa.Forum, &fa.RunOnTopicCreation, &fa.RunDaysAfterTopicCreation, &fa.RunDaysAfterTopicLastReply, &str, &fa.Extra); e != nil {
|
||||
return nil, e
|
||||
}
|
||||
fa.Action = ConvStringToAct(str)
|
||||
fas = append(fas, &fa)
|
||||
}
|
||||
return fas, rows.Err()
|
||||
}
|
||||
|
||||
func (s *DefaultForumActionStore) GetNewTopicActions(fid int) (fas []*ForumAction, e error) {
|
||||
rows, e := s.getNewTopicActions.Query(fid)
|
||||
if e != nil {
|
||||
return nil, e
|
||||
}
|
||||
defer rows.Close()
|
||||
var str string
|
||||
for rows.Next() {
|
||||
fa := ForumAction{RunOnTopicCreation: true}
|
||||
if e := rows.Scan(&fa.ID, &fa.Forum, &fa.RunDaysAfterTopicCreation, &fa.RunDaysAfterTopicLastReply, &str, &fa.Extra); e != nil {
|
||||
return nil, e
|
||||
}
|
||||
fa.Action = ConvStringToAct(str)
|
||||
fas = append(fas, &fa)
|
||||
}
|
||||
return fas, rows.Err()
|
||||
}
|
||||
|
||||
func (s *DefaultForumActionStore) Add(fa *ForumAction) (int, error) {
|
||||
res, e := s.add.Exec(fa.Forum, fa.RunOnTopicCreation, fa.RunDaysAfterTopicCreation, fa.RunDaysAfterTopicLastReply, ConvActToString(fa.Action), fa.Extra)
|
||||
if e != nil {
|
||||
return 0, e
|
||||
}
|
||||
lastID, e := res.LastInsertId()
|
||||
return int(lastID), e
|
||||
}
|
||||
|
||||
func (s *DefaultForumActionStore) Delete(id int) error {
|
||||
_, e := s.delete.Exec(id)
|
||||
return e
|
||||
}
|
||||
|
||||
func (s *DefaultForumActionStore) Exists(id int) bool {
|
||||
err := s.exists.QueryRow(id).Scan(&id)
|
||||
if err != nil && err != ErrNoRows {
|
||||
LogError(err)
|
||||
}
|
||||
return err != ErrNoRows
|
||||
}
|
||||
|
||||
func (s *DefaultForumActionStore) Count() (count int) {
|
||||
err := s.count.QueryRow().Scan(&count)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
func (s *DefaultForumActionStore) CountInForum(fid int) (count int) {
|
||||
return Countf(s.countInForum, fid)
|
||||
}
|
||||
|
||||
/*type ForumActionRunnable struct {
|
||||
ID int
|
||||
ActionID int
|
||||
TargetID int
|
||||
TargetType int // 0 = topic
|
||||
RunAfter int //unixtime
|
||||
}
|
||||
|
||||
type ForumActionRunnableStoreInt interface {
|
||||
GetAfterTime(unix int) ([]*ForumActionRunnable, error)
|
||||
GetInForum(fid int) ([]*ForumActionRunnable, error)
|
||||
Delete(faid int) error
|
||||
DeleteInForum(fid int) error
|
||||
DeleteByActionID(faid int) error
|
||||
Count() int
|
||||
CountInForum(fid int) int
|
||||
}
|
||||
|
||||
type DefaultForumActionRunnableStore struct {
|
||||
delete *sql.Stmt
|
||||
deleteInForum *sql.Stmt
|
||||
count *sql.Stmt
|
||||
countInForum *sql.Stmt
|
||||
}
|
||||
|
||||
func NewDefaultForumActionRunnableStore(acc *qgen.Accumulator) (*DefaultForumActionRunnableStore, error) {
|
||||
fa := "forums_actions"
|
||||
return &DefaultForumActionRunnableStore{
|
||||
delete: acc.Delete(fa).Where("faid=?").Prepare(),
|
||||
deleteInForum: acc.Delete(fa).Where("fid=?").Prepare(),
|
||||
count: acc.Count(fa).Prepare(),
|
||||
countInForum: acc.Count(fa).Where("faid=?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
func (s *DefaultForumActionRunnableStore) Delete(id int) error {
|
||||
_, e := s.delete.Exec(id)
|
||||
return e
|
||||
}
|
||||
|
||||
func (s *DefaultForumActionRunnableStore) DeleteInForum(fid int) error {
|
||||
_, e := s.deleteInForum.Exec(fid)
|
||||
return e
|
||||
}
|
||||
|
||||
func (s *DefaultForumActionRunnableStore) Count() (count int) {
|
||||
err := s.count.QueryRow().Scan(&count)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
func (s *DefaultForumActionRunnableStore) CountInForum(fid int) (count int) {
|
||||
return Countf(s.countInForum, fid)
|
||||
}
|
||||
*/
|
@ -4,317 +4,325 @@ import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"../query_gen/lib"
|
||||
)
|
||||
|
||||
// ? - Can we avoid duplicating the items in this list in a bunch of places?
|
||||
|
||||
var LocalPermList = []string{
|
||||
"ViewTopic",
|
||||
"LikeItem",
|
||||
"CreateTopic",
|
||||
"EditTopic",
|
||||
"DeleteTopic",
|
||||
"CreateReply",
|
||||
"EditReply",
|
||||
"DeleteReply",
|
||||
"PinTopic",
|
||||
"CloseTopic",
|
||||
"MoveTopic",
|
||||
"ViewTopic",
|
||||
"LikeItem",
|
||||
"CreateTopic",
|
||||
"EditTopic",
|
||||
"DeleteTopic",
|
||||
"CreateReply",
|
||||
"EditReply",
|
||||
"DeleteReply",
|
||||
"PinTopic",
|
||||
"CloseTopic",
|
||||
"MoveTopic",
|
||||
}
|
||||
|
||||
// TODO: Rename this to ForumPermSet?
|
||||
/* Inherit from group permissions for ones we don't have */
|
||||
type ForumPerms struct {
|
||||
ViewTopic bool
|
||||
//ViewOwnTopic bool
|
||||
LikeItem bool
|
||||
CreateTopic bool
|
||||
EditTopic bool
|
||||
DeleteTopic bool
|
||||
CreateReply bool
|
||||
//CreateReplyToOwn bool
|
||||
EditReply bool
|
||||
//EditOwnReply bool
|
||||
DeleteReply bool
|
||||
PinTopic bool
|
||||
CloseTopic bool
|
||||
//CloseOwnTopic bool
|
||||
MoveTopic bool
|
||||
ViewTopic bool
|
||||
//ViewOwnTopic bool
|
||||
LikeItem bool
|
||||
CreateTopic bool
|
||||
EditTopic bool
|
||||
DeleteTopic bool
|
||||
CreateReply bool
|
||||
//CreateReplyToOwn bool
|
||||
EditReply bool
|
||||
//EditOwnReply bool
|
||||
DeleteReply bool
|
||||
PinTopic bool
|
||||
CloseTopic bool
|
||||
//CloseOwnTopic bool
|
||||
MoveTopic bool
|
||||
|
||||
Overrides bool
|
||||
ExtData map[string]bool
|
||||
Overrides bool
|
||||
ExtData map[string]bool
|
||||
}
|
||||
|
||||
func PresetToPermmap(preset string) (out map[string]*ForumPerms) {
|
||||
out = make(map[string]*ForumPerms)
|
||||
switch preset {
|
||||
case "all":
|
||||
out["guests"] = ReadForumPerms()
|
||||
out["members"] = ReadWriteForumPerms()
|
||||
out["staff"] = AllForumPerms()
|
||||
out["admins"] = AllForumPerms()
|
||||
case "announce":
|
||||
out["guests"] = ReadForumPerms()
|
||||
out["members"] = ReadReplyForumPerms()
|
||||
out["staff"] = AllForumPerms()
|
||||
out["admins"] = AllForumPerms()
|
||||
case "members":
|
||||
out["guests"] = BlankForumPerms()
|
||||
out["members"] = ReadWriteForumPerms()
|
||||
out["staff"] = AllForumPerms()
|
||||
out["admins"] = AllForumPerms()
|
||||
case "staff":
|
||||
out["guests"] = BlankForumPerms()
|
||||
out["members"] = BlankForumPerms()
|
||||
out["staff"] = ReadWriteForumPerms()
|
||||
out["admins"] = AllForumPerms()
|
||||
case "admins":
|
||||
out["guests"] = BlankForumPerms()
|
||||
out["members"] = BlankForumPerms()
|
||||
out["staff"] = BlankForumPerms()
|
||||
out["admins"] = AllForumPerms()
|
||||
case "archive":
|
||||
out["guests"] = ReadForumPerms()
|
||||
out["members"] = ReadForumPerms()
|
||||
out["staff"] = ReadForumPerms()
|
||||
out["admins"] = ReadForumPerms() //CurateForumPerms. Delete / Edit but no create?
|
||||
default:
|
||||
out["guests"] = BlankForumPerms()
|
||||
out["members"] = BlankForumPerms()
|
||||
out["staff"] = BlankForumPerms()
|
||||
out["admins"] = BlankForumPerms()
|
||||
}
|
||||
return out
|
||||
out = make(map[string]*ForumPerms)
|
||||
switch preset {
|
||||
case "all":
|
||||
out["guests"] = ReadForumPerms()
|
||||
out["members"] = ReadWriteForumPerms()
|
||||
out["staff"] = AllForumPerms()
|
||||
out["admins"] = AllForumPerms()
|
||||
case "announce":
|
||||
out["guests"] = ReadForumPerms()
|
||||
out["members"] = ReadReplyForumPerms()
|
||||
out["staff"] = AllForumPerms()
|
||||
out["admins"] = AllForumPerms()
|
||||
case "members":
|
||||
out["guests"] = BlankForumPerms()
|
||||
out["members"] = ReadWriteForumPerms()
|
||||
out["staff"] = AllForumPerms()
|
||||
out["admins"] = AllForumPerms()
|
||||
case "staff":
|
||||
out["guests"] = BlankForumPerms()
|
||||
out["members"] = BlankForumPerms()
|
||||
out["staff"] = ReadWriteForumPerms()
|
||||
out["admins"] = AllForumPerms()
|
||||
case "admins":
|
||||
out["guests"] = BlankForumPerms()
|
||||
out["members"] = BlankForumPerms()
|
||||
out["staff"] = BlankForumPerms()
|
||||
out["admins"] = AllForumPerms()
|
||||
case "archive":
|
||||
out["guests"] = ReadForumPerms()
|
||||
out["members"] = ReadForumPerms()
|
||||
out["staff"] = ReadForumPerms()
|
||||
out["admins"] = ReadForumPerms() //CurateForumPerms. Delete / Edit but no create?
|
||||
default:
|
||||
out["guests"] = BlankForumPerms()
|
||||
out["members"] = BlankForumPerms()
|
||||
out["staff"] = BlankForumPerms()
|
||||
out["admins"] = BlankForumPerms()
|
||||
}
|
||||
return out
|
||||
}
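
PresetToPermmap expands a preset name into a group-name → permission-set map. A stripped-down model of that expansion for the "announce" preset, using a two-flag stand-in for ForumPerms; the real struct carries the full LocalPermList plus Overrides and ExtData:

```go
package main

import "fmt"

// miniPerms models only two of the flags carried by ForumPerms.
type miniPerms struct {
	ViewTopic   bool
	CreateReply bool
}

// presetToMiniPermmap sketches the shape of PresetToPermmap's output:
// guests can read announcements, everyone else can also reply.
func presetToMiniPermmap(preset string) map[string]miniPerms {
	out := make(map[string]miniPerms)
	switch preset {
	case "announce":
		out["guests"] = miniPerms{ViewTopic: true}
		out["members"] = miniPerms{ViewTopic: true, CreateReply: true}
		out["staff"] = miniPerms{ViewTopic: true, CreateReply: true}
		out["admins"] = miniPerms{ViewTopic: true, CreateReply: true}
	default:
		// unknown presets default to no access for every group
	}
	return out
}

func main() {
	fmt.Printf("%+v\n", presetToMiniPermmap("announce"))
}
```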
|
||||
|
||||
func PermmapToQuery(permmap map[string]*ForumPerms, fid int) error {
|
||||
tx, err := qgen.Builder.Begin()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback()
|
||||
tx, err := qgen.Builder.Begin()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
deleteForumPermsByForumTx, err := qgen.Builder.SimpleDeleteTx(tx, "forums_permissions", "fid = ?")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = deleteForumPermsByForumTx.Exec(fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
deleteForumPermsByForumTx, err := qgen.Builder.SimpleDeleteTx(tx, "forums_permissions", "fid = ?")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
perms, err := json.Marshal(permmap["admins"])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = deleteForumPermsByForumTx.Exec(fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
addForumPermsToForumAdminsTx, err := qgen.Builder.SimpleInsertSelectTx(tx,
|
||||
qgen.DBInsert{"forums_permissions", "gid,fid,preset,permissions", ""},
|
||||
qgen.DBSelect{"users_groups", "gid,?,'',?", "is_admin = 1", "", ""},
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = addForumPermsToForumAdminsTx.Exec(fid, perms)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
perms, err := json.Marshal(permmap["admins"])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
perms, err = json.Marshal(permmap["staff"])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
addForumPermsToForumAdminsTx, err := qgen.Builder.SimpleInsertSelectTx(tx,
|
||||
qgen.DBInsert{"forums_permissions", "gid, fid, preset, permissions", ""},
|
||||
qgen.DBSelect{"users_groups", "gid, ? AS fid, ? AS preset, ? AS permissions", "is_admin = 1", "", ""},
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
addForumPermsToForumStaffTx, err := qgen.Builder.SimpleInsertSelectTx(tx,
|
||||
qgen.DBInsert{"forums_permissions", "gid,fid,preset,permissions", ""},
|
||||
qgen.DBSelect{"users_groups", "gid,?,'',?", "is_admin = 0 AND is_mod = 1", "", ""},
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = addForumPermsToForumStaffTx.Exec(fid, perms)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = addForumPermsToForumAdminsTx.Exec(fid, "", perms)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
perms, err = json.Marshal(permmap["members"])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
perms, err = json.Marshal(permmap["staff"])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
addForumPermsToForumMembersTx, err := qgen.Builder.SimpleInsertSelectTx(tx,
|
||||
qgen.DBInsert{"forums_permissions", "gid,fid,preset,permissions", ""},
|
||||
qgen.DBSelect{"users_groups", "gid,?,'',?", "is_admin = 0 AND is_mod = 0 AND is_banned = 0", "", ""},
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = addForumPermsToForumMembersTx.Exec(fid, perms)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
addForumPermsToForumStaffTx, err := qgen.Builder.SimpleInsertSelectTx(tx,
|
||||
qgen.DBInsert{"forums_permissions", "gid, fid, preset, permissions", ""},
|
||||
qgen.DBSelect{"users_groups", "gid, ? AS fid, ? AS preset, ? AS permissions", "is_admin = 0 AND is_mod = 1", "", ""},
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = addForumPermsToForumStaffTx.Exec(fid, "", perms)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO: The group ID is probably a variable somewhere. Find it and use it.
|
||||
// Group 5 is the Awaiting Activation group
|
||||
err = ReplaceForumPermsForGroupTx(tx, 5, map[int]string{fid: ""}, map[int]*ForumPerms{fid: permmap["guests"]})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
perms, err = json.Marshal(permmap["members"])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO: Consult a config setting instead of GuestUser?
|
||||
err = ReplaceForumPermsForGroupTx(tx, GuestUser.Group, map[int]string{fid: ""}, map[int]*ForumPerms{fid: permmap["guests"]})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
addForumPermsToForumMembersTx, err := qgen.Builder.SimpleInsertSelectTx(tx,
|
||||
qgen.DBInsert{"forums_permissions", "gid, fid, preset, permissions", ""},
|
||||
qgen.DBSelect{"users_groups", "gid, ? AS fid, ? AS preset, ? AS permissions", "is_admin = 0 AND is_mod = 0 AND is_banned = 0", "", ""},
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = addForumPermsToForumMembersTx.Exec(fid, "", perms)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = tx.Commit()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return FPStore.Reload(fid)
|
||||
//return TopicList.RebuildPermTree()
|
||||
// TODO: The group ID is probably a variable somewhere. Find it and use it.
|
||||
// Group 5 is the Awaiting Activation group
|
||||
err = ReplaceForumPermsForGroupTx(tx, 5, map[int]string{fid: ""}, map[int]*ForumPerms{fid: permmap["guests"]})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO: Consult a config setting instead of GuestUser?
|
||||
err = ReplaceForumPermsForGroupTx(tx, GuestUser.Group, map[int]string{fid: ""}, map[int]*ForumPerms{fid: permmap["guests"]})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = tx.Commit()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = FPStore.Reload(fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return TopicList.RebuildPermTree()
|
||||
}
|
||||
|
||||
// TODO: FPStore.Reload?
|
||||
func ReplaceForumPermsForGroup(gid int, presetSet map[int]string, permSets map[int]*ForumPerms) error {
|
||||
tx, err := qgen.Builder.Begin()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback()
|
||||
tx, err := qgen.Builder.Begin()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
err = ReplaceForumPermsForGroupTx(tx, gid, presetSet, permSets)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return tx.Commit()
|
||||
//return TopicList.RebuildPermTree()
|
||||
err = ReplaceForumPermsForGroupTx(tx, gid, presetSet, permSets)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = tx.Commit()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return TopicList.RebuildPermTree()
|
||||
}
|
||||
|
||||
func ReplaceForumPermsForGroupTx(tx *sql.Tx, gid int, presetSets map[int]string, permSets map[int]*ForumPerms) error {
|
||||
deleteForumPermsForGroupTx, err := qgen.Builder.SimpleDeleteTx(tx, "forums_permissions", "gid = ? AND fid = ?")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
deleteForumPermsForGroupTx, err := qgen.Builder.SimpleDeleteTx(tx, "forums_permissions", "gid = ? AND fid = ?")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
addForumPermsToGroupTx, err := qgen.Builder.SimpleInsertTx(tx, "forums_permissions", "gid,fid,preset,permissions", "?,?,?,?")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for fid, permSet := range permSets {
|
||||
permstr, err := json.Marshal(permSet)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = deleteForumPermsForGroupTx.Exec(gid, fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = addForumPermsToGroupTx.Exec(gid, fid, presetSets[fid], string(permstr))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
addForumPermsToGroupTx, err := qgen.Builder.SimpleInsertTx(tx, "forums_permissions", "gid, fid, preset, permissions", "?,?,?,?")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for fid, permSet := range permSets {
|
||||
permstr, err := json.Marshal(permSet)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = deleteForumPermsForGroupTx.Exec(gid, fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = addForumPermsToGroupTx.Exec(gid, fid, presetSets[fid], string(permstr))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
return nil
|
||||
}
|
||||
|
||||
// TODO: Refactor this and write tests for it
|
||||
// TODO: We really need to improve the thread safety of this
|
||||
func ForumPermsToGroupForumPreset(fp *ForumPerms) string {
|
||||
if !fp.Overrides {
|
||||
return "default"
|
||||
}
|
||||
if !fp.ViewTopic {
|
||||
return "no_access"
|
||||
}
|
||||
canPost := (fp.LikeItem && fp.CreateTopic && fp.CreateReply)
|
||||
canModerate := (canPost && fp.EditTopic && fp.DeleteTopic && fp.EditReply && fp.DeleteReply && fp.PinTopic && fp.CloseTopic && fp.MoveTopic)
|
||||
if canModerate {
|
||||
return "can_moderate"
|
||||
}
|
||||
if fp.EditTopic || fp.DeleteTopic || fp.EditReply || fp.DeleteReply || fp.PinTopic || fp.CloseTopic || fp.MoveTopic {
|
||||
//if !canPost {
|
||||
return "custom"
|
||||
//}
|
||||
//return "quasi_mod"
|
||||
}
|
||||
func ForumPermsToGroupForumPreset(fperms *ForumPerms) string {
|
||||
if !fperms.Overrides {
|
||||
return "default"
|
||||
}
|
||||
if !fperms.ViewTopic {
|
||||
return "no_access"
|
||||
}
|
||||
var canPost = (fperms.LikeItem && fperms.CreateTopic && fperms.CreateReply)
|
||||
var canModerate = (canPost && fperms.EditTopic && fperms.DeleteTopic && fperms.EditReply && fperms.DeleteReply && fperms.PinTopic && fperms.CloseTopic && fperms.MoveTopic)
|
||||
if canModerate {
|
||||
return "can_moderate"
|
||||
}
|
||||
if fperms.EditTopic || fperms.DeleteTopic || fperms.EditReply || fperms.DeleteReply || fperms.PinTopic || fperms.CloseTopic || fperms.MoveTopic {
|
||||
if !canPost {
|
||||
return "custom"
|
||||
}
|
||||
return "quasi_mod"
|
||||
}
|
||||
|
||||
if canPost {
|
||||
return "can_post"
|
||||
}
|
||||
if fp.ViewTopic && !fp.LikeItem && !fp.CreateTopic && !fp.CreateReply {
|
||||
return "read_only"
|
||||
}
|
||||
return "custom"
|
||||
if canPost {
|
||||
return "can_post"
|
||||
}
|
||||
if fperms.ViewTopic && !fperms.LikeItem && !fperms.CreateTopic && !fperms.CreateReply {
|
||||
return "read_only"
|
||||
}
|
||||
return "custom"
|
||||
}
|
||||
|
||||
func GroupForumPresetToForumPerms(preset string) (fperms *ForumPerms, changed bool) {
|
||||
switch preset {
|
||||
case "read_only":
|
||||
return ReadForumPerms(), true
|
||||
case "can_post":
|
||||
return ReadWriteForumPerms(), true
|
||||
case "can_moderate":
|
||||
return AllForumPerms(), true
|
||||
case "no_access":
|
||||
return &ForumPerms{Overrides: true, ExtData: make(map[string]bool)}, true
|
||||
case "default":
|
||||
return BlankForumPerms(), true
|
||||
}
|
||||
return fperms, false
|
||||
switch preset {
|
||||
case "read_only":
|
||||
return ReadForumPerms(), true
|
||||
case "can_post":
|
||||
return ReadWriteForumPerms(), true
|
||||
case "can_moderate":
|
||||
return AllForumPerms(), true
|
||||
case "no_access":
|
||||
return &ForumPerms{Overrides: true, ExtData: make(map[string]bool)}, true
|
||||
case "default":
|
||||
return BlankForumPerms(), true
|
||||
}
|
||||
return fperms, false
|
||||
}
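
ForumPermsToGroupForumPreset and GroupForumPresetToForumPerms map between permission sets and preset names in opposite directions. A compact sketch of the classification direction with a reduced flag set; the real code also handles Overrides, no_access via missing ViewTopic on an overriding set, and the quasi_mod case:

```go
package main

import "fmt"

// miniFP models a handful of the flags checked by the classifier above.
type miniFP struct {
	ViewTopic, CreateTopic, CreateReply, EditTopic, DeleteTopic bool
}

// classify picks the closest named preset for a permission set.
func classify(fp miniFP) string {
	if !fp.ViewTopic {
		return "no_access"
	}
	canPost := fp.CreateTopic && fp.CreateReply
	if canPost && fp.EditTopic && fp.DeleteTopic {
		return "can_moderate"
	}
	if canPost {
		return "can_post"
	}
	if !fp.CreateTopic && !fp.CreateReply {
		return "read_only"
	}
	return "custom"
}

func main() {
	fmt.Println(classify(miniFP{ViewTopic: true}))                                       // read_only
	fmt.Println(classify(miniFP{ViewTopic: true, CreateTopic: true, CreateReply: true})) // can_post
}
```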
|
||||
|
||||
func BlankForumPerms() *ForumPerms {
|
||||
return &ForumPerms{ViewTopic: false}
|
||||
return &ForumPerms{ViewTopic: false}
|
||||
}
|
||||
|
||||
func ReadWriteForumPerms() *ForumPerms {
|
||||
return &ForumPerms{
|
||||
ViewTopic: true,
|
||||
LikeItem: true,
|
||||
CreateTopic: true,
|
||||
CreateReply: true,
|
||||
Overrides: true,
|
||||
ExtData: make(map[string]bool),
|
||||
}
|
||||
return &ForumPerms{
|
||||
ViewTopic: true,
|
||||
LikeItem: true,
|
||||
CreateTopic: true,
|
||||
CreateReply: true,
|
||||
Overrides: true,
|
||||
ExtData: make(map[string]bool),
|
||||
}
|
||||
}
|
||||
|
||||
func ReadReplyForumPerms() *ForumPerms {
|
||||
return &ForumPerms{
|
||||
ViewTopic: true,
|
||||
LikeItem: true,
|
||||
CreateReply: true,
|
||||
Overrides: true,
|
||||
ExtData: make(map[string]bool),
|
||||
}
|
||||
return &ForumPerms{
|
||||
ViewTopic: true,
|
||||
LikeItem: true,
|
||||
CreateReply: true,
|
||||
Overrides: true,
|
||||
ExtData: make(map[string]bool),
|
||||
}
|
||||
}
|
||||
|
||||
func ReadForumPerms() *ForumPerms {
|
||||
return &ForumPerms{
|
||||
ViewTopic: true,
|
||||
Overrides: true,
|
||||
ExtData: make(map[string]bool),
|
||||
}
|
||||
return &ForumPerms{
|
||||
ViewTopic: true,
|
||||
Overrides: true,
|
||||
ExtData: make(map[string]bool),
|
||||
}
|
||||
}
|
||||
|
||||
// AllForumPerms is a set of forum local permissions with everything set to true
|
||||
func AllForumPerms() *ForumPerms {
|
||||
return &ForumPerms{
|
||||
ViewTopic: true,
|
||||
LikeItem: true,
|
||||
CreateTopic: true,
|
||||
EditTopic: true,
|
||||
DeleteTopic: true,
|
||||
CreateReply: true,
|
||||
EditReply: true,
|
||||
DeleteReply: true,
|
||||
PinTopic: true,
|
||||
CloseTopic: true,
|
||||
MoveTopic: true,
|
||||
return &ForumPerms{
|
||||
ViewTopic: true,
|
||||
LikeItem: true,
|
||||
CreateTopic: true,
|
||||
EditTopic: true,
|
||||
DeleteTopic: true,
|
||||
CreateReply: true,
|
||||
EditReply: true,
|
||||
DeleteReply: true,
|
||||
PinTopic: true,
|
||||
CloseTopic: true,
|
||||
MoveTopic: true,
|
||||
|
||||
Overrides: true,
|
||||
ExtData: make(map[string]bool),
|
||||
}
|
||||
Overrides: true,
|
||||
ExtData: make(map[string]bool),
|
||||
}
|
||||
}
|
||||
|
@ -5,251 +5,207 @@ import (
|
||||
"encoding/json"
|
||||
"sync"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"../query_gen/lib"
|
||||
)
|
||||
|
||||
var FPStore ForumPermsStore
|
||||
|
||||
type ForumPermsStore interface {
|
||||
Init() error
|
||||
GetAllMap() (bigMap map[int]map[int]*ForumPerms)
|
||||
Get(fid, gid int) (fp *ForumPerms, err error)
|
||||
GetCopy(fid, gid int) (fp ForumPerms, err error)
|
||||
ReloadAll() error
|
||||
Reload(id int) error
|
||||
Init() error
|
||||
GetAllMap() (bigMap map[int]map[int]*ForumPerms)
|
||||
Get(fid int, gid int) (fperms *ForumPerms, err error)
|
||||
GetCopy(fid int, gid int) (fperms ForumPerms, err error)
|
||||
ReloadAll() error
|
||||
Reload(id int) error
|
||||
}
|
||||
|
||||
type ForumPermsCache interface {
|
||||
}
|
||||
|
||||
type MemoryForumPermsStore struct {
|
||||
getByForum *sql.Stmt
|
||||
getByForumGroup *sql.Stmt
|
||||
getByForum *sql.Stmt
|
||||
getByForumGroup *sql.Stmt
|
||||
|
||||
evenForums map[int]map[int]*ForumPerms
|
||||
oddForums map[int]map[int]*ForumPerms // [fid][gid]*ForumPerms
|
||||
evenLock sync.RWMutex
|
||||
oddLock sync.RWMutex
|
||||
evenForums map[int]map[int]*ForumPerms
|
||||
oddForums map[int]map[int]*ForumPerms // [fid][gid]*ForumPerms
|
||||
evenLock sync.RWMutex
|
||||
oddLock sync.RWMutex
|
||||
}
|
||||
|
||||
func NewMemoryForumPermsStore() (*MemoryForumPermsStore, error) {
|
||||
acc := qgen.NewAcc()
|
||||
fp := "forums_permissions"
|
||||
return &MemoryForumPermsStore{
|
||||
getByForum: acc.Select(fp).Columns("gid,permissions").Where("fid=?").Orderby("gid ASC").Prepare(),
|
||||
getByForumGroup: acc.Select(fp).Columns("permissions").Where("fid=? AND gid=?").Prepare(),
|
||||
acc := qgen.NewAcc()
|
||||
return &MemoryForumPermsStore{
|
||||
getByForum: acc.Select("forums_permissions").Columns("gid, permissions").Where("fid = ?").Orderby("gid ASC").Prepare(),
|
||||
getByForumGroup: acc.Select("forums_permissions").Columns("permissions").Where("fid = ? AND gid = ?").Prepare(),
|
||||
|
||||
evenForums: make(map[int]map[int]*ForumPerms),
|
||||
oddForums: make(map[int]map[int]*ForumPerms),
|
||||
}, acc.FirstError()
|
||||
evenForums: make(map[int]map[int]*ForumPerms),
|
||||
oddForums: make(map[int]map[int]*ForumPerms),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
func (s *MemoryForumPermsStore) Init() error {
|
||||
DebugLog("Initialising the forum perms store")
|
||||
return s.ReloadAll()
|
||||
func (fps *MemoryForumPermsStore) Init() error {
|
||||
DebugLog("Initialising the forum perms store")
|
||||
return fps.ReloadAll()
|
||||
}
|
||||
|
||||
// TODO: Optimise this?
|
||||
func (s *MemoryForumPermsStore) ReloadAll() error {
|
||||
DebugLog("Reloading the forum perms")
|
||||
fids, e := Forums.GetAllIDs()
|
||||
if e != nil {
|
||||
return e
|
||||
}
|
||||
for _, fid := range fids {
|
||||
if e := s.reload(fid); e != nil {
|
||||
return e
|
||||
}
|
||||
}
|
||||
if e := s.recalcCanSeeAll(); e != nil {
|
||||
return e
|
||||
}
|
||||
TopicListThaw.Thaw()
|
||||
return nil
|
||||
func (fps *MemoryForumPermsStore) ReloadAll() error {
|
||||
DebugLog("Reloading the forum perms")
|
||||
fids, err := Forums.GetAllIDs()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, fid := range fids {
|
||||
err := fps.Reload(fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
func (s *MemoryForumPermsStore) parseForumPerm(perms []byte) (pperms *ForumPerms, e error) {
	DebugDetail("perms: ", string(perms))
	pperms = BlankForumPerms()
	e = json.Unmarshal(perms, &pperms)
	pperms.ExtData = make(map[string]bool)
	pperms.Overrides = true
	return pperms, e
}

func (s *MemoryForumPermsStore) Reload(fid int) error {
	e := s.reload(fid)
	if e != nil {
		return e
	}
	if e = s.recalcCanSeeAll(); e != nil {
		return e
	}
	TopicListThaw.Thaw()
	return nil
func (fps *MemoryForumPermsStore) parseForumPerm(perms []byte) (pperms *ForumPerms, err error) {
	DebugDetail("perms: ", string(perms))
	pperms = BlankForumPerms()
	err = json.Unmarshal(perms, &pperms)
	pperms.ExtData = make(map[string]bool)
	pperms.Overrides = true
	return pperms, err
}

// TODO: Need a more thread-safe way of doing this. Possibly with sync.Map?
func (s *MemoryForumPermsStore) reload(fid int) error {
	DebugLogf("Reloading the forum permissions for forum #%d", fid)
	rows, err := s.getByForum.Query(fid)
	if err != nil {
		return err
	}
	defer rows.Close()
func (fps *MemoryForumPermsStore) Reload(fid int) error {
	DebugLogf("Reloading the forum permissions for forum #%d", fid)
	rows, err := fps.getByForum.Query(fid)
	if err != nil {
		return err
	}
	defer rows.Close()

	forumPerms := make(map[int]*ForumPerms)
	for rows.Next() {
		var gid int
		var perms []byte
		err := rows.Scan(&gid, &perms)
		if err != nil {
			return err
		}
	var forumPerms = make(map[int]*ForumPerms)
	for rows.Next() {
		var gid int
		var perms []byte
		err := rows.Scan(&gid, &perms)
		if err != nil {
			return err
		}

		DebugLog("gid:", gid)
		DebugLogf("perms: %+v\n", perms)
		pperms, err := s.parseForumPerm(perms)
		if err != nil {
			return err
		}
		DebugLogf("pperms: %+v\n", pperms)
		forumPerms[gid] = pperms
	}
	DebugLogf("forumPerms: %+v\n", forumPerms)
		pperms, err := fps.parseForumPerm(perms)
		if err != nil {
			return err
		}
		forumPerms[gid] = pperms
	}
	DebugLogf("forumPerms: %+v\n", forumPerms)
	if fid%2 == 0 {
		fps.evenLock.Lock()
		fps.evenForums[fid] = forumPerms
		fps.evenLock.Unlock()
	} else {
		fps.oddLock.Lock()
		fps.oddForums[fid] = forumPerms
		fps.oddLock.Unlock()
	}

	if fid%2 == 0 {
		s.evenLock.Lock()
		s.evenForums[fid] = forumPerms
		s.evenLock.Unlock()
	} else {
		s.oddLock.Lock()
		s.oddForums[fid] = forumPerms
		s.oddLock.Unlock()
	}
	return nil
}
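The reload above stores the freshly parsed permissions into one of two maps picked by forum ID parity, each guarded by its own RWMutex, so writes for even-numbered forums don't block readers of odd-numbered ones. A self-contained sketch of that sharding pattern, with illustrative names rather than Gosora's real types:

package main

import (
	"fmt"
	"sync"
)

// twoShardMap mirrors the even/odd split: two maps, each behind its own lock.
type twoShardMap struct {
	evenMu sync.RWMutex
	even   map[int]string
	oddMu  sync.RWMutex
	odd    map[int]string
}

func (m *twoShardMap) store(fid int, v string) {
	if fid%2 == 0 {
		m.evenMu.Lock()
		m.even[fid] = v
		m.evenMu.Unlock()
		return
	}
	m.oddMu.Lock()
	m.odd[fid] = v
	m.oddMu.Unlock()
}

func (m *twoShardMap) load(fid int) (string, bool) {
	if fid%2 == 0 {
		m.evenMu.RLock()
		defer m.evenMu.RUnlock()
		v, ok := m.even[fid]
		return v, ok
	}
	m.oddMu.RLock()
	defer m.oddMu.RUnlock()
	v, ok := m.odd[fid]
	return v, ok
}

func main() {
	m := &twoShardMap{even: map[int]string{}, odd: map[int]string{}}
	m.store(2, "perms for forum #2")
	fmt.Println(m.load(2))
}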

	groups, err := Groups.GetAll()
	if err != nil {
		return err
	}
	fids, err := Forums.GetAllIDs()
	if err != nil {
		return err
	}

func (s *MemoryForumPermsStore) recalcCanSeeAll() error {
	groups, err := Groups.GetAll()
	if err != nil {
		return err
	}
	fids, err := Forums.GetAllIDs()
	if err != nil {
		return err
	}
	for _, group := range groups {
		DebugLogf("Updating the forum permissions for Group #%d", group.ID)
		group.CanSee = []int{}
		for _, fid := range fids {
			DebugDetailf("Forum #%+v\n", fid)
			var forumPerms map[int]*ForumPerms
			var ok bool
			if fid%2 == 0 {
				fps.evenLock.RLock()
				forumPerms, ok = fps.evenForums[fid]
				fps.evenLock.RUnlock()
			} else {
				fps.oddLock.RLock()
				forumPerms, ok = fps.oddForums[fid]
				fps.oddLock.RUnlock()
			}

	gc, ok := Groups.(GroupCache)
	if !ok {
		TopicListThaw.Thaw()
		return nil
	}
			var forumPerm *ForumPerms
			if !ok {
				continue
			}
			forumPerm, ok = forumPerms[group.ID]
			if !ok {
				continue
			}

	// A separate loop to avoid contending on the odd-even locks as much
	fForumPerms := make(map[int]map[int]*ForumPerms)
	for _, fid := range fids {
		var forumPerms map[int]*ForumPerms
		var ok bool
		if fid%2 == 0 {
			s.evenLock.RLock()
			forumPerms, ok = s.evenForums[fid]
			s.evenLock.RUnlock()
		} else {
			s.oddLock.RLock()
			forumPerms, ok = s.oddForums[fid]
			s.oddLock.RUnlock()
		}
		if ok {
			fForumPerms[fid] = forumPerms
		}
	}

	// TODO: Can we recalculate CanSee without calculating every other forum?
	for _, g := range groups {
		DebugLogf("Updating the forum permissions for Group #%d", g.ID)
		canSee := []int{}
		for _, fid := range fids {
			DebugDetailf("Forum #%+v\n", fid)
			forumPerms, ok := fForumPerms[fid]
			if !ok {
				continue
			}
			fp, ok := forumPerms[g.ID]
			if !ok {
				if g.Perms.ViewTopic {
					canSee = append(canSee, fid)
				}
				continue
			}

			if fp.Overrides {
				if fp.ViewTopic {
					canSee = append(canSee, fid)
				}
			} else if g.Perms.ViewTopic {
				canSee = append(canSee, fid)
			}
			//DebugDetail("g.ID: ", g.ID)
			DebugDetailf("forumPerm: %+v\n", fp)
			DebugDetail("canSee: ", canSee)
		}
		DebugDetailf("canSee (length %d): %+v \n", len(canSee), canSee)
		gc.SetCanSee(g.ID, canSee)
	}

	return nil
			if forumPerm.Overrides {
				if forumPerm.ViewTopic {
					group.CanSee = append(group.CanSee, fid)
				}
			} else if group.Perms.ViewTopic {
				group.CanSee = append(group.CanSee, fid)
			}
			DebugDetail("group.ID: ", group.ID)
			DebugDetailf("forumPerm: %+v\n", forumPerm)
			DebugDetail("group.CanSee: ", group.CanSee)
		}
		DebugDetailf("group.CanSee (length %d): %+v \n", len(group.CanSee), group.CanSee)
	}
	return nil
}
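The newer recalcCanSeeAll takes a two-phase approach: it first snapshots every forum's permission map while briefly holding the shard read locks, then walks the groups against that snapshot with no locks held. A simplified, self-contained sketch of the same shape, using toy types rather than the real ForumPerms:

package main

import (
	"fmt"
	"sync"
)

type perms struct{ viewTopic, overrides bool }

var (
	mu    sync.RWMutex
	byFid = map[int]map[int]perms{ // fid -> gid -> perms
		1: {2: {viewTopic: true, overrides: true}},
	}
	groupDefault = map[int]bool{2: false} // gid -> group-level ViewTopic
)

// recalcCanSee snapshots under the read lock, then computes without it.
func recalcCanSee(fids, gids []int) map[int][]int {
	// Phase 1: copy the per-forum maps while holding the lock.
	snapshot := make(map[int]map[int]perms, len(fids))
	mu.RLock()
	for _, fid := range fids {
		if fp, ok := byFid[fid]; ok {
			snapshot[fid] = fp
		}
	}
	mu.RUnlock()

	// Phase 2: derive each group's visible-forum list from the snapshot.
	canSee := make(map[int][]int)
	for _, gid := range gids {
		for _, fid := range fids {
			fp, ok := snapshot[fid][gid]
			switch {
			case ok && fp.overrides:
				if fp.viewTopic {
					canSee[gid] = append(canSee[gid], fid)
				}
			case groupDefault[gid]:
				canSee[gid] = append(canSee[gid], fid)
			}
		}
	}
	return canSee
}

func main() {
	fmt.Println(recalcCanSee([]int{1}, []int{2}))
}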

// ! Throughput on this might be bad due to the excessive locking
func (s *MemoryForumPermsStore) GetAllMap() (bigMap map[int]map[int]*ForumPerms) {
	bigMap = make(map[int]map[int]*ForumPerms)
	s.evenLock.RLock()
	for fid, subMap := range s.evenForums {
		bigMap[fid] = subMap
	}
	s.evenLock.RUnlock()
	s.oddLock.RLock()
	for fid, subMap := range s.oddForums {
		bigMap[fid] = subMap
	}
	s.oddLock.RUnlock()
	return bigMap
func (fps *MemoryForumPermsStore) GetAllMap() (bigMap map[int]map[int]*ForumPerms) {
	bigMap = make(map[int]map[int]*ForumPerms)
	fps.evenLock.RLock()
	for fid, subMap := range fps.evenForums {
		bigMap[fid] = subMap
	}
	fps.evenLock.RUnlock()
	fps.oddLock.RLock()
	for fid, subMap := range fps.oddForums {
		bigMap[fid] = subMap
	}
	fps.oddLock.RUnlock()
	return bigMap
}
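The TODO further up floats sync.Map as a more thread-safe alternative to the hand-rolled shards, and GetAllMap's broad locking is the kind of call that would change shape. A rough, hypothetical sketch of what a sync.Map-backed variant could look like; this is not how the store is actually written:

package main

import (
	"fmt"
	"sync"
)

// permsBySyncMap keeps fid -> (gid -> perms) in a sync.Map instead of two
// mutex-guarded maps; Range replaces the even/odd copy loops in GetAllMap.
type permsBySyncMap struct {
	forums sync.Map // map[int]map[int]string
}

func (s *permsBySyncMap) set(fid int, perms map[int]string) {
	s.forums.Store(fid, perms)
}

func (s *permsBySyncMap) getAllMap() map[int]map[int]string {
	big := make(map[int]map[int]string)
	s.forums.Range(func(k, v interface{}) bool {
		big[k.(int)] = v.(map[int]string)
		return true
	})
	return big
}

func main() {
	s := &permsBySyncMap{}
	s.set(3, map[int]string{1: "view"})
	fmt.Println(s.getAllMap())
}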

// TODO: Add a hook here and have plugin_guilds use it
// TODO: Check if the forum exists?
// TODO: Fix the races
// TODO: Return BlankForumPerms() when the forum permission set doesn't exist?
func (s *MemoryForumPermsStore) Get(fid, gid int) (fp *ForumPerms, err error) {
	var fmap map[int]*ForumPerms
	var ok bool
	if fid%2 == 0 {
		s.evenLock.RLock()
		fmap, ok = s.evenForums[fid]
		s.evenLock.RUnlock()
	} else {
		s.oddLock.RLock()
		fmap, ok = s.oddForums[fid]
		s.oddLock.RUnlock()
	}
	if !ok {
		return fp, ErrNoRows
	}
func (fps *MemoryForumPermsStore) Get(fid int, gid int) (fperms *ForumPerms, err error) {
	var fmap map[int]*ForumPerms
	var ok bool
	if fid%2 == 0 {
		fps.evenLock.RLock()
		fmap, ok = fps.evenForums[fid]
		fps.evenLock.RUnlock()
	} else {
		fps.oddLock.RLock()
		fmap, ok = fps.oddForums[fid]
		fps.oddLock.RUnlock()
	}
	if !ok {
		return fperms, ErrNoRows
	}

	fp, ok = fmap[gid]
	if !ok {
		return fp, ErrNoRows
	}
	return fp, nil
	fperms, ok = fmap[gid]
	if !ok {
		return fperms, ErrNoRows
	}
	return fperms, nil
}

// TODO: Check if the forum exists?
// TODO: Fix the races
func (s *MemoryForumPermsStore) GetCopy(fid, gid int) (fp ForumPerms, e error) {
	fPermsPtr, e := s.Get(fid, gid)
	if e != nil {
		return fp, e
	}
	return *fPermsPtr, nil
func (fps *MemoryForumPermsStore) GetCopy(fid int, gid int) (fperms ForumPerms, err error) {
	fPermsPtr, err := fps.Get(fid, gid)
	if err != nil {
		return fperms, err
	}
	return *fPermsPtr, nil
}

@ -1,7 +1,7 @@
|
||||
/*
|
||||
*
|
||||
* Gosora Forum Store
|
||||
* Copyright Azareal 2017 - 2020
|
||||
* Gosora Forum Store
|
||||
* Copyright Azareal 2017 - 2018
|
||||
*
|
||||
*/
|
||||
package common
|
||||
@ -10,443 +10,358 @@ import (
|
||||
"database/sql"
|
||||
"errors"
|
||||
"log"
|
||||
|
||||
//"fmt"
|
||||
"sort"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"../query_gen/lib"
|
||||
)
|
||||
|
||||
var forumCreateMutex sync.Mutex
|
||||
var forumPerms map[int]map[int]*ForumPerms // [gid][fid]*ForumPerms // TODO: Add an abstraction around this and make it more thread-safe
|
||||
var Forums ForumStore
|
||||
var ErrBlankName = errors.New("The name must not be blank")
|
||||
var ErrNoDeleteReports = errors.New("You cannot delete the Reports forum")
|
||||
|
||||
// ForumStore is an interface for accessing the forums and the metadata stored on them
|
||||
type ForumStore interface {
|
||||
LoadForums() error
|
||||
Each(h func(*Forum) error) error
|
||||
DirtyGet(id int) *Forum
|
||||
Get(id int) (*Forum, error)
|
||||
BypassGet(id int) (*Forum, error)
|
||||
BulkGetCopy(ids []int) (forums []Forum, err error)
|
||||
Reload(id int) error // ? - Should we move this to ForumCache? It might require us to do some unnecessary casting though
|
||||
//Update(Forum) error
|
||||
Delete(id int) error
|
||||
AddTopic(tid, uid, fid int) error
|
||||
RemoveTopic(fid int) error
|
||||
RemoveTopics(fid, count int) error
|
||||
UpdateLastTopic(tid, uid, fid int) error
|
||||
Exists(id int) bool
|
||||
GetAll() ([]*Forum, error)
|
||||
GetAllIDs() ([]int, error)
|
||||
GetAllVisible() ([]*Forum, error)
|
||||
GetAllVisibleIDs() ([]int, error)
|
||||
//GetChildren(parentID int, parentType string) ([]*Forum,error)
|
||||
//GetFirstChild(parentID int, parentType string) (*Forum,error)
|
||||
Create(name, desc string, active bool, preset string) (int, error)
|
||||
UpdateOrder(updateMap map[int]int) error
|
||||
LoadForums() error
|
||||
DirtyGet(id int) *Forum
|
||||
Get(id int) (*Forum, error)
|
||||
BypassGet(id int) (*Forum, error)
|
||||
BulkGetCopy(ids []int) (forums []Forum, err error)
|
||||
Reload(id int) error // ? - Should we move this to ForumCache? It might require us to do some unnecessary casting though
|
||||
//Update(Forum) error
|
||||
Delete(id int) error
|
||||
AddTopic(tid int, uid int, fid int) error
|
||||
RemoveTopic(fid int) error
|
||||
UpdateLastTopic(tid int, uid int, fid int) error
|
||||
Exists(id int) bool
|
||||
GetAll() ([]*Forum, error)
|
||||
GetAllIDs() ([]int, error)
|
||||
GetAllVisible() ([]*Forum, error)
|
||||
GetAllVisibleIDs() ([]int, error)
|
||||
//GetChildren(parentID int, parentType string) ([]*Forum,error)
|
||||
//GetFirstChild(parentID int, parentType string) (*Forum,error)
|
||||
Create(forumName string, forumDesc string, active bool, preset string) (int, error)
|
||||
|
||||
Count() int
|
||||
GlobalCount() int
|
||||
}
|
||||
|
||||
type ForumCache interface {
|
||||
CacheGet(id int) (*Forum, error)
|
||||
CacheSet(f *Forum) error
|
||||
CacheDelete(id int)
|
||||
Length() int
|
||||
CacheGet(id int) (*Forum, error)
|
||||
CacheSet(forum *Forum) error
|
||||
CacheDelete(id int)
|
||||
Length() int
|
||||
}
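ForumCache is the optional, cache-only subset of the store; code that needs it type-asserts the ForumStore it holds, the same way recalcCanSeeAll asserts Groups.(GroupCache) earlier in this diff. A self-contained sketch of that optional-capability pattern, with toy interfaces standing in for the real ones:

package main

import "fmt"

// store is the always-available interface; cache is the optional extra.
type store interface{ Get(id int) string }
type cache interface{ Length() int }

type memoryStore struct{ items map[int]string }

func (m *memoryStore) Get(id int) string { return m.items[id] }
func (m *memoryStore) Length() int       { return len(m.items) }

func describe(s store) {
	fmt.Println("item:", s.Get(1))
	// Only use the cache methods if this implementation actually has them.
	if c, ok := s.(cache); ok {
		fmt.Println("cached items:", c.Length())
	}
}

func main() {
	describe(&memoryStore{items: map[int]string{1: "general"}})
}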
|
||||
|
||||
// MemoryForumStore is a struct which holds an arbitrary number of forums in memory, usually all of them, although we might introduce functionality to hold a smaller subset in memory for sites with an extremely large number of forums
|
||||
type MemoryForumStore struct {
|
||||
forums sync.Map // map[int]*Forum
|
||||
forumView atomic.Value // []*Forum
|
||||
forums sync.Map // map[int]*Forum
|
||||
forumView atomic.Value // []*Forum
|
||||
|
||||
get *sql.Stmt
|
||||
getAll *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
create *sql.Stmt
|
||||
count *sql.Stmt
|
||||
updateCache *sql.Stmt
|
||||
addTopics *sql.Stmt
|
||||
removeTopics *sql.Stmt
|
||||
lastTopic *sql.Stmt
|
||||
updateOrder *sql.Stmt
|
||||
get *sql.Stmt
|
||||
getAll *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
create *sql.Stmt
|
||||
count *sql.Stmt
|
||||
updateCache *sql.Stmt
|
||||
addTopics *sql.Stmt
|
||||
removeTopics *sql.Stmt
|
||||
}
|
||||
|
||||
// NewMemoryForumStore gives you a new instance of MemoryForumStore
|
||||
func NewMemoryForumStore() (*MemoryForumStore, error) {
|
||||
acc := qgen.NewAcc()
|
||||
f := "forums"
|
||||
set := func(s string) *sql.Stmt {
|
||||
return acc.Update(f).Set(s).Where("fid=?").Prepare()
|
||||
}
|
||||
// TODO: Do a proper delete
|
||||
return &MemoryForumStore{
|
||||
get: acc.Select(f).Columns("name, desc, tmpl, active, order, preset, parentID, parentType, topicCount, lastTopicID, lastReplyerID").Where("fid=?").Prepare(),
|
||||
getAll: acc.Select(f).Columns("fid, name, desc, tmpl, active, order, preset, parentID, parentType, topicCount, lastTopicID, lastReplyerID").Orderby("order ASC, fid ASC").Prepare(),
|
||||
delete: set("name='',active=0"),
|
||||
create: acc.Insert(f).Columns("name,desc,tmpl,active,preset").Fields("?,?,'',?,?").Prepare(),
|
||||
count: acc.Count(f).Where("name != ''").Prepare(),
|
||||
updateCache: set("lastTopicID=?,lastReplyerID=?"),
|
||||
addTopics: set("topicCount=topicCount+?"),
|
||||
removeTopics: set("topicCount=topicCount-?"),
|
||||
lastTopic: acc.Select("topics").Columns("tid").Where("parentID=?").Orderby("lastReplyAt DESC,createdAt DESC").Limit("1").Prepare(),
|
||||
updateOrder: set("order=?"),
|
||||
}, acc.FirstError()
|
||||
acc := qgen.NewAcc()
|
||||
// TODO: Do a proper delete
|
||||
return &MemoryForumStore{
|
||||
get: acc.Select("forums").Columns("name, desc, active, preset, parentID, parentType, topicCount, lastTopicID, lastReplyerID").Where("fid = ?").Prepare(),
|
||||
getAll: acc.Select("forums").Columns("fid, name, desc, active, preset, parentID, parentType, topicCount, lastTopicID, lastReplyerID").Orderby("fid ASC").Prepare(),
|
||||
delete: acc.Update("forums").Set("name= '', active = 0").Where("fid = ?").Prepare(),
|
||||
create: acc.Insert("forums").Columns("name, desc, active, preset").Fields("?,?,?,?").Prepare(),
|
||||
count: acc.Count("forums").Where("name != ''").Prepare(),
|
||||
updateCache: acc.Update("forums").Set("lastTopicID = ?, lastReplyerID = ?").Where("fid = ?").Prepare(),
|
||||
addTopics: acc.Update("forums").Set("topicCount = topicCount + ?").Where("fid = ?").Prepare(),
|
||||
removeTopics: acc.Update("forums").Set("topicCount = topicCount - ?").Where("fid = ?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
// TODO: Rename to ReloadAll?
|
||||
// TODO: Add support for subforums
|
||||
func (s *MemoryForumStore) LoadForums() error {
|
||||
var forumView []*Forum
|
||||
addForum := func(f *Forum) {
|
||||
s.forums.Store(f.ID, f)
|
||||
if f.Active && f.Name != "" && f.ParentType == "" {
|
||||
forumView = append(forumView, f)
|
||||
}
|
||||
}
|
||||
func (mfs *MemoryForumStore) LoadForums() error {
|
||||
var forumView []*Forum
|
||||
addForum := func(forum *Forum) {
|
||||
mfs.forums.Store(forum.ID, forum)
|
||||
if forum.Active && forum.Name != "" && forum.ParentType == "" {
|
||||
forumView = append(forumView, forum)
|
||||
}
|
||||
}
|
||||
|
||||
rows, err := s.getAll.Query()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
rows, err := mfs.getAll.Query()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
i := 0
|
||||
for ; rows.Next(); i++ {
|
||||
f := &Forum{ID: 0, Active: true, Preset: "all"}
|
||||
err = rows.Scan(&f.ID, &f.Name, &f.Desc, &f.Tmpl, &f.Active, &f.Order, &f.Preset, &f.ParentID, &f.ParentType, &f.TopicCount, &f.LastTopicID, &f.LastReplyerID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var i = 0
|
||||
for ; rows.Next(); i++ {
|
||||
forum := &Forum{ID: 0, Active: true, Preset: "all"}
|
||||
err = rows.Scan(&forum.ID, &forum.Name, &forum.Desc, &forum.Active, &forum.Preset, &forum.ParentID, &forum.ParentType, &forum.TopicCount, &forum.LastTopicID, &forum.LastReplyerID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if f.Name == "" {
|
||||
DebugLog("Adding a placeholder forum")
|
||||
} else {
|
||||
log.Printf("Adding the '%s' forum", f.Name)
|
||||
}
|
||||
if forum.Name == "" {
|
||||
DebugLog("Adding a placeholder forum")
|
||||
} else {
|
||||
log.Printf("Adding the '%s' forum", forum.Name)
|
||||
}
|
||||
|
||||
f.Link = BuildForumURL(NameToSlug(f.Name), f.ID)
|
||||
f.LastTopic = Topics.DirtyGet(f.LastTopicID)
|
||||
f.LastReplyer = Users.DirtyGet(f.LastReplyerID)
|
||||
// TODO: Create a specialised function with a bit less overhead for getting the last page for a post count
|
||||
_, _, lastPage := PageOffset(f.LastTopic.PostCount, 1, Config.ItemsPerPage)
|
||||
f.LastPage = lastPage
|
||||
addForum(f)
|
||||
}
|
||||
s.forumView.Store(forumView)
|
||||
TopicListThaw.Thaw()
|
||||
return rows.Err()
|
||||
forum.Link = BuildForumURL(NameToSlug(forum.Name), forum.ID)
|
||||
forum.LastTopic = Topics.DirtyGet(forum.LastTopicID)
|
||||
forum.LastReplyer = Users.DirtyGet(forum.LastReplyerID)
|
||||
|
||||
addForum(forum)
|
||||
}
|
||||
mfs.forumView.Store(forumView)
|
||||
return rows.Err()
|
||||
}
|
||||
|
||||
// TODO: Hide social groups too
|
||||
// ? - Will this be hit a lot by plugin_guilds?
|
||||
func (s *MemoryForumStore) rebuildView() {
|
||||
var forumView []*Forum
|
||||
s.forums.Range(func(_, val interface{}) bool {
|
||||
f := val.(*Forum)
|
||||
// ? - ParentType blank means that it doesn't have a parent
|
||||
if f.Active && f.Name != "" && f.ParentType == "" {
|
||||
forumView = append(forumView, f)
|
||||
}
|
||||
return true
|
||||
})
|
||||
sort.Sort(SortForum(forumView))
|
||||
s.forumView.Store(forumView)
|
||||
TopicListThaw.Thaw()
|
||||
func (mfs *MemoryForumStore) rebuildView() {
|
||||
var forumView []*Forum
|
||||
mfs.forums.Range(func(_ interface{}, value interface{}) bool {
|
||||
forum := value.(*Forum)
|
||||
// ? - ParentType blank means that it doesn't have a parent
|
||||
if forum.Active && forum.Name != "" && forum.ParentType == "" {
|
||||
forumView = append(forumView, forum)
|
||||
}
|
||||
return true
|
||||
})
|
||||
sort.Sort(SortForum(forumView))
|
||||
mfs.forumView.Store(forumView)
|
||||
}
|
||||
|
||||
func (s *MemoryForumStore) Each(h func(*Forum) error) (err error) {
|
||||
s.forums.Range(func(_, val interface{}) bool {
|
||||
err = h(val.(*Forum))
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
})
|
||||
return err
|
||||
func (mfs *MemoryForumStore) DirtyGet(id int) *Forum {
|
||||
fint, ok := mfs.forums.Load(id)
|
||||
if !ok || fint.(*Forum).Name == "" {
|
||||
return &Forum{ID: -1, Name: ""}
|
||||
}
|
||||
return fint.(*Forum)
|
||||
}
|
||||
|
||||
func (s *MemoryForumStore) DirtyGet(id int) *Forum {
|
||||
fint, ok := s.forums.Load(id)
|
||||
if !ok || fint.(*Forum).Name == "" {
|
||||
return &Forum{ID: -1, Name: ""}
|
||||
}
|
||||
return fint.(*Forum)
|
||||
func (mfs *MemoryForumStore) CacheGet(id int) (*Forum, error) {
|
||||
fint, ok := mfs.forums.Load(id)
|
||||
if !ok || fint.(*Forum).Name == "" {
|
||||
return nil, ErrNoRows
|
||||
}
|
||||
return fint.(*Forum), nil
|
||||
}
|
||||
|
||||
func (s *MemoryForumStore) CacheGet(id int) (*Forum, error) {
|
||||
fint, ok := s.forums.Load(id)
|
||||
if !ok || fint.(*Forum).Name == "" {
|
||||
return nil, ErrNoRows
|
||||
}
|
||||
return fint.(*Forum), nil
|
||||
func (mfs *MemoryForumStore) Get(id int) (*Forum, error) {
|
||||
fint, ok := mfs.forums.Load(id)
|
||||
if !ok || fint.(*Forum).Name == "" {
|
||||
var forum = &Forum{ID: id}
|
||||
err := mfs.get.QueryRow(id).Scan(&forum.Name, &forum.Desc, &forum.Active, &forum.Preset, &forum.ParentID, &forum.ParentType, &forum.TopicCount, &forum.LastTopicID, &forum.LastReplyerID)
|
||||
if err != nil {
|
||||
return forum, err
|
||||
}
|
||||
|
||||
forum.Link = BuildForumURL(NameToSlug(forum.Name), forum.ID)
|
||||
forum.LastTopic = Topics.DirtyGet(forum.LastTopicID)
|
||||
forum.LastReplyer = Users.DirtyGet(forum.LastReplyerID)
|
||||
|
||||
mfs.CacheSet(forum)
|
||||
return forum, err
|
||||
}
|
||||
return fint.(*Forum), nil
|
||||
}
|
||||
|
||||
func (s *MemoryForumStore) Get(id int) (*Forum, error) {
|
||||
fint, ok := s.forums.Load(id)
|
||||
if ok {
|
||||
forum := fint.(*Forum)
|
||||
if forum.Name == "" {
|
||||
return nil, ErrNoRows
|
||||
}
|
||||
return forum, nil
|
||||
}
|
||||
func (mfs *MemoryForumStore) BypassGet(id int) (*Forum, error) {
|
||||
var forum = &Forum{ID: id}
|
||||
err := mfs.get.QueryRow(id).Scan(&forum.Name, &forum.Desc, &forum.Active, &forum.Preset, &forum.ParentID, &forum.ParentType, &forum.TopicCount, &forum.LastTopicID, &forum.LastReplyerID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
forum, err := s.BypassGet(id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
s.CacheSet(forum)
|
||||
return forum, err
|
||||
}
|
||||
forum.Link = BuildForumURL(NameToSlug(forum.Name), forum.ID)
|
||||
forum.LastTopic = Topics.DirtyGet(forum.LastTopicID)
|
||||
forum.LastReplyer = Users.DirtyGet(forum.LastReplyerID)
|
||||
|
||||
func (s *MemoryForumStore) BypassGet(id int) (*Forum, error) {
|
||||
f := &Forum{ID: id}
|
||||
err := s.get.QueryRow(id).Scan(&f.Name, &f.Desc, &f.Tmpl, &f.Active, &f.Order, &f.Preset, &f.ParentID, &f.ParentType, &f.TopicCount, &f.LastTopicID, &f.LastReplyerID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if f.Name == "" {
|
||||
return nil, ErrNoRows
|
||||
}
|
||||
f.Link = BuildForumURL(NameToSlug(f.Name), f.ID)
|
||||
f.LastTopic = Topics.DirtyGet(f.LastTopicID)
|
||||
f.LastReplyer = Users.DirtyGet(f.LastReplyerID)
|
||||
// TODO: Create a specialised function with a bit less overhead for getting the last page for a post count
|
||||
_, _, lastPage := PageOffset(f.LastTopic.PostCount, 1, Config.ItemsPerPage)
|
||||
f.LastPage = lastPage
|
||||
//TopicListThaw.Thaw()
|
||||
|
||||
return f, err
|
||||
return forum, err
|
||||
}
|
||||
|
||||
// TODO: Optimise this
|
||||
func (s *MemoryForumStore) BulkGetCopy(ids []int) (forums []Forum, err error) {
|
||||
forums = make([]Forum, len(ids))
|
||||
for i, id := range ids {
|
||||
f, err := s.Get(id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
forums[i] = f.Copy()
|
||||
}
|
||||
return forums, nil
|
||||
func (mfs *MemoryForumStore) BulkGetCopy(ids []int) (forums []Forum, err error) {
|
||||
forums = make([]Forum, len(ids))
|
||||
for i, id := range ids {
|
||||
forum, err := mfs.Get(id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
forums[i] = forum.Copy()
|
||||
}
|
||||
return forums, nil
|
||||
}
|
||||
|
||||
func (s *MemoryForumStore) Reload(id int) error {
|
||||
forum, err := s.BypassGet(id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
s.CacheSet(forum)
|
||||
return nil
|
||||
func (mfs *MemoryForumStore) Reload(id int) error {
|
||||
var forum = &Forum{ID: id}
|
||||
err := mfs.get.QueryRow(id).Scan(&forum.Name, &forum.Desc, &forum.Active, &forum.Preset, &forum.ParentID, &forum.ParentType, &forum.TopicCount, &forum.LastTopicID, &forum.LastReplyerID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
forum.Link = BuildForumURL(NameToSlug(forum.Name), forum.ID)
|
||||
forum.LastTopic = Topics.DirtyGet(forum.LastTopicID)
|
||||
forum.LastReplyer = Users.DirtyGet(forum.LastReplyerID)
|
||||
|
||||
mfs.CacheSet(forum)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *MemoryForumStore) CacheSet(f *Forum) error {
|
||||
s.forums.Store(f.ID, f)
|
||||
s.rebuildView()
|
||||
return nil
|
||||
func (mfs *MemoryForumStore) CacheSet(forum *Forum) error {
|
||||
mfs.forums.Store(forum.ID, forum)
|
||||
mfs.rebuildView()
|
||||
return nil
|
||||
}
|
||||
|
||||
// ! Has a randomised order
|
||||
func (s *MemoryForumStore) GetAll() (forumView []*Forum, err error) {
|
||||
s.forums.Range(func(_, val interface{}) bool {
|
||||
forumView = append(forumView, val.(*Forum))
|
||||
return true
|
||||
})
|
||||
sort.Sort(SortForum(forumView))
|
||||
return forumView, nil
|
||||
func (mfs *MemoryForumStore) GetAll() (forumView []*Forum, err error) {
|
||||
mfs.forums.Range(func(_ interface{}, value interface{}) bool {
|
||||
forumView = append(forumView, value.(*Forum))
|
||||
return true
|
||||
})
|
||||
sort.Sort(SortForum(forumView))
|
||||
return forumView, nil
|
||||
}
|
||||
|
||||
// ? - Can we optimise the sorting?
|
||||
func (s *MemoryForumStore) GetAllIDs() (ids []int, err error) {
|
||||
s.forums.Range(func(_, val interface{}) bool {
|
||||
ids = append(ids, val.(*Forum).ID)
|
||||
return true
|
||||
})
|
||||
sort.Ints(ids)
|
||||
return ids, nil
|
||||
func (mfs *MemoryForumStore) GetAllIDs() (ids []int, err error) {
|
||||
mfs.forums.Range(func(_ interface{}, value interface{}) bool {
|
||||
ids = append(ids, value.(*Forum).ID)
|
||||
return true
|
||||
})
|
||||
sort.Ints(ids)
|
||||
return ids, nil
|
||||
}
|
||||
|
||||
func (s *MemoryForumStore) GetAllVisible() (forumView []*Forum, err error) {
|
||||
forumView = s.forumView.Load().([]*Forum)
|
||||
return forumView, nil
|
||||
func (mfs *MemoryForumStore) GetAllVisible() (forumView []*Forum, err error) {
|
||||
forumView = mfs.forumView.Load().([]*Forum)
|
||||
return forumView, nil
|
||||
}
|
||||
|
||||
func (s *MemoryForumStore) GetAllVisibleIDs() ([]int, error) {
|
||||
forumView := s.forumView.Load().([]*Forum)
|
||||
ids := make([]int, len(forumView))
|
||||
for i := 0; i < len(forumView); i++ {
|
||||
ids[i] = forumView[i].ID
|
||||
}
|
||||
return ids, nil
|
||||
func (mfs *MemoryForumStore) GetAllVisibleIDs() ([]int, error) {
|
||||
forumView := mfs.forumView.Load().([]*Forum)
|
||||
var ids = make([]int, len(forumView))
|
||||
for i := 0; i < len(forumView); i++ {
|
||||
ids[i] = forumView[i].ID
|
||||
}
|
||||
return ids, nil
|
||||
}
|
||||
|
||||
// TODO: Implement sub-forums.
|
||||
/*func (s *MemoryForumStore) GetChildren(parentID int, parentType string) ([]*Forum,error) {
|
||||
return nil, nil
|
||||
/*func (mfs *MemoryForumStore) GetChildren(parentID int, parentType string) ([]*Forum,error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (s *MemoryForumStore) GetFirstChild(parentID int, parentType string) (*Forum,error) {
|
||||
return nil, nil
|
||||
func (mfs *MemoryForumStore) GetFirstChild(parentID int, parentType string) (*Forum,error) {
|
||||
return nil, nil
|
||||
}*/
|
||||
|
||||
// TODO: Add a query for this rather than hitting cache
|
||||
func (s *MemoryForumStore) Exists(id int) bool {
|
||||
forum, ok := s.forums.Load(id)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
return forum.(*Forum).Name != ""
|
||||
func (mfs *MemoryForumStore) Exists(id int) bool {
|
||||
forum, ok := mfs.forums.Load(id)
|
||||
return ok && forum.(*Forum).Name != ""
|
||||
}
|
||||
|
||||
// TODO: Batch deletions with name blanking? Is this necessary?
|
||||
func (s *MemoryForumStore) CacheDelete(id int) {
|
||||
s.forums.Delete(id)
|
||||
s.rebuildView()
|
||||
func (mfs *MemoryForumStore) CacheDelete(id int) {
|
||||
mfs.forums.Delete(id)
|
||||
mfs.rebuildView()
|
||||
}
|
||||
|
||||
// TODO: Add a hook to allow plugin_guilds to detect when one of it's forums has just been deleted?
|
||||
func (s *MemoryForumStore) Delete(id int) error {
|
||||
if id == ReportForumID {
|
||||
return ErrNoDeleteReports
|
||||
}
|
||||
_, err := s.delete.Exec(id)
|
||||
s.CacheDelete(id)
|
||||
return err
|
||||
func (mfs *MemoryForumStore) Delete(id int) error {
|
||||
if id == ReportForumID {
|
||||
return errors.New("You cannot delete the Reports forum")
|
||||
}
|
||||
_, err := mfs.delete.Exec(id)
|
||||
mfs.CacheDelete(id)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *MemoryForumStore) AddTopic(tid, uid, fid int) error {
|
||||
_, err := s.updateCache.Exec(tid, uid, fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = s.addTopics.Exec(1, fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// TODO: Bypass the database and update this with a lock or an unsafe atomic swap
|
||||
return s.Reload(fid)
|
||||
func (mfs *MemoryForumStore) AddTopic(tid int, uid int, fid int) error {
|
||||
_, err := mfs.updateCache.Exec(tid, uid, fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = mfs.addTopics.Exec(1, fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// TODO: Bypass the database and update this with a lock or an unsafe atomic swap
|
||||
return mfs.Reload(fid)
|
||||
}
|
||||
|
||||
func (s *MemoryForumStore) RefreshTopic(fid int) (err error) {
|
||||
var tid int
|
||||
err = s.lastTopic.QueryRow(fid).Scan(&tid)
|
||||
if err == sql.ErrNoRows {
|
||||
f, err := s.CacheGet(fid)
|
||||
if err != nil || f.LastTopicID != 0 {
|
||||
_, err = s.updateCache.Exec(0, 0, fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
s.Reload(fid)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
topic, err := Topics.Get(tid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = s.updateCache.Exec(tid, topic.CreatedBy, fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// TODO: Bypass the database and update this with a lock or an unsafe atomic swap
|
||||
s.Reload(fid)
|
||||
return nil
|
||||
}
|
||||
|
||||
// TODO: Make this update more atomic
|
||||
func (s *MemoryForumStore) RemoveTopic(fid int) error {
|
||||
_, err := s.removeTopics.Exec(1, fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return s.RefreshTopic(fid)
|
||||
}
|
||||
func (s *MemoryForumStore) RemoveTopics(fid, count int) error {
|
||||
_, err := s.removeTopics.Exec(count, fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return s.RefreshTopic(fid)
|
||||
// TODO: Update the forum cache with the latest topic
|
||||
func (mfs *MemoryForumStore) RemoveTopic(fid int) error {
|
||||
_, err := mfs.removeTopics.Exec(1, fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// TODO: Bypass the database and update this with a lock or an unsafe atomic swap
|
||||
mfs.Reload(fid)
|
||||
return nil
|
||||
}
|
||||
|
||||
// DEPRECATED. forum.Update() will be the way to do this in the future, once it's completed
|
||||
// TODO: Have a pointer to the last topic rather than storing it on the forum itself
|
||||
func (s *MemoryForumStore) UpdateLastTopic(tid, uid, fid int) error {
|
||||
_, err := s.updateCache.Exec(tid, uid, fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// TODO: Bypass the database and update this with a lock or an unsafe atomic swap
|
||||
return s.Reload(fid)
|
||||
func (mfs *MemoryForumStore) UpdateLastTopic(tid int, uid int, fid int) error {
|
||||
_, err := mfs.updateCache.Exec(tid, uid, fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// TODO: Bypass the database and update this with a lock or an unsafe atomic swap
|
||||
return mfs.Reload(fid)
|
||||
}
|
||||
|
||||
func (s *MemoryForumStore) Create(name, desc string, active bool, preset string) (int, error) {
|
||||
if name == "" {
|
||||
return 0, ErrBlankName
|
||||
}
|
||||
forumCreateMutex.Lock()
|
||||
defer forumCreateMutex.Unlock()
|
||||
func (mfs *MemoryForumStore) Create(forumName string, forumDesc string, active bool, preset string) (int, error) {
|
||||
forumCreateMutex.Lock()
|
||||
res, err := mfs.create.Exec(forumName, forumDesc, active, preset)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
res, err := s.create.Exec(name, desc, active, preset)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
fid64, err := res.LastInsertId()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
fid := int(fid64)
|
||||
|
||||
fid64, err := res.LastInsertId()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
fid := int(fid64)
|
||||
err = mfs.Reload(fid)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
err = s.Reload(fid)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
PermmapToQuery(PresetToPermmap(preset), fid)
|
||||
return fid, nil
|
||||
}
|
||||
|
||||
// TODO: Make this atomic, maybe with a transaction?
|
||||
func (s *MemoryForumStore) UpdateOrder(updateMap map[int]int) error {
|
||||
for fid, order := range updateMap {
|
||||
_, err := s.updateOrder.Exec(order, fid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return s.LoadForums()
|
||||
PermmapToQuery(PresetToPermmap(preset), fid)
|
||||
forumCreateMutex.Unlock()
|
||||
return fid, nil
|
||||
}
|
||||
|
||||
// ! Might be slightly inaccurate, if the sync.Map is constantly shifting and churning, but it'll stabilise eventually. Also, slow. Don't use this on every request x.x
|
||||
// Length returns the number of forums in the memory cache
|
||||
func (s *MemoryForumStore) Length() (len int) {
|
||||
s.forums.Range(func(_, _ interface{}) bool {
|
||||
len++
|
||||
return true
|
||||
})
|
||||
return len
|
||||
func (mfs *MemoryForumStore) Length() (length int) {
|
||||
mfs.forums.Range(func(_ interface{}, value interface{}) bool {
|
||||
length++
|
||||
return true
|
||||
})
|
||||
return length
|
||||
}
|
||||
|
||||
// TODO: Get the total count of forums in the forum store rather than doing a heavy query for this?
|
||||
// Count returns the total number of forums
|
||||
func (s *MemoryForumStore) Count() (count int) {
|
||||
err := s.count.QueryRow().Scan(&count)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
}
|
||||
return count
|
||||
// GlobalCount returns the total number of forums
|
||||
func (mfs *MemoryForumStore) GlobalCount() (fcount int) {
|
||||
err := mfs.count.QueryRow().Scan(&fcount)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
}
|
||||
return fcount
|
||||
}
|
||||
|
||||
// TODO: Work on SqlForumStore
|
||||
|
116 common/group.go
@ -1,101 +1,63 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
)
|
||||
import "database/sql"
|
||||
import "../query_gen/lib"
|
||||
|
||||
var blankGroup = Group{ID: 0, Name: ""}
|
||||
|
||||
type GroupAdmin struct {
|
||||
ID int
|
||||
Name string
|
||||
Rank string
|
||||
RankClass string
|
||||
CanEdit bool
|
||||
CanDelete bool
|
||||
ID int
|
||||
Name string
|
||||
Rank string
|
||||
RankClass string
|
||||
CanEdit bool
|
||||
CanDelete bool
|
||||
}
|
||||
|
||||
// ! Fix the data races in the fperms
|
||||
type Group struct {
|
||||
ID int
|
||||
Name string
|
||||
IsMod bool
|
||||
IsAdmin bool
|
||||
IsBanned bool
|
||||
Tag string
|
||||
Perms Perms
|
||||
PermissionsText []byte
|
||||
PluginPerms map[string]bool // Custom permissions defined by plugins. What if two plugins declare the same permission, but they handle them in incompatible ways? Very unlikely, we probably don't need to worry about this, the plugin authors should be aware of each other to some extent
|
||||
PluginPermsText []byte
|
||||
CanSee []int // The IDs of the forums this group can see
|
||||
UserCount int // ! Might be temporary as I might want to lean on the database instead for this
|
||||
ID int
|
||||
Name string
|
||||
IsMod bool
|
||||
IsAdmin bool
|
||||
IsBanned bool
|
||||
Tag string
|
||||
Perms Perms
|
||||
PermissionsText []byte
|
||||
PluginPerms map[string]bool // Custom permissions defined by plugins. What if two plugins declare the same permission, but they handle them in incompatible ways? Very unlikely, we probably don't need to worry about this, the plugin authors should be aware of each other to some extent
|
||||
PluginPermsText []byte
|
||||
CanSee []int // The IDs of the forums this group can see
|
||||
UserCount int // ! Might be temporary as I might want to lean on the database instead for this
|
||||
}
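PermissionsText and PluginPermsText hold the raw JSON that initGroup later decodes into the Perms struct and the PluginPerms map. A minimal sketch of that decode step, using a stand-in Perms struct and a generic plugin key rather than Gosora's real definitions:

package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// toyPerms stands in for the real Perms struct; the fields are illustrative.
type toyPerms struct {
	ViewTopic   bool `json:"ViewTopic"`
	CreateTopic bool `json:"CreateTopic"`
}

func main() {
	permissionsText := []byte(`{"ViewTopic":true,"CreateTopic":false}`)
	pluginPermsText := []byte(`{"example_plugin_perm":true}`)

	var perms toyPerms
	if err := json.Unmarshal(permissionsText, &perms); err != nil {
		log.Fatal("bad group perms: ", string(permissionsText))
	}

	pluginPerms := make(map[string]bool)
	if err := json.Unmarshal(pluginPermsText, &pluginPerms); err != nil {
		log.Fatal("bad group plugin perms: ", string(pluginPermsText))
	}

	fmt.Printf("perms: %+v pluginPerms: %+v\n", perms, pluginPerms)
}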
|
||||
|
||||
type GroupStmts struct {
|
||||
updateGroup *sql.Stmt
|
||||
updateGroupRank *sql.Stmt
|
||||
updateGroupPerms *sql.Stmt
|
||||
updateGroupRank *sql.Stmt
|
||||
}
|
||||
|
||||
var groupStmts GroupStmts
|
||||
|
||||
func init() {
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
set := func(s string) *sql.Stmt {
|
||||
return acc.Update("users_groups").Set(s).Where("gid=?").Prepare()
|
||||
}
|
||||
groupStmts = GroupStmts{
|
||||
updateGroup: set("name=?,tag=?"),
|
||||
updateGroupRank: set("is_admin=?,is_mod=?,is_banned=?"),
|
||||
updateGroupPerms: set("permissions=?"),
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
groupStmts = GroupStmts{
|
||||
updateGroupRank: acc.Update("users_groups").Set("is_admin = ?, is_mod = ?, is_banned = ?").Where("gid = ?").Prepare(),
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
}
|
||||
|
||||
func (g *Group) ChangeRank(isAdmin, isMod, isBanned bool) (err error) {
|
||||
_, err = groupStmts.updateGroupRank.Exec(isAdmin, isMod, isBanned, g.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_ = Groups.Reload(g.ID)
|
||||
return nil
|
||||
}
|
||||
func (group *Group) ChangeRank(isAdmin bool, isMod bool, isBanned bool) (err error) {
|
||||
_, err = groupStmts.updateGroupRank.Exec(isAdmin, isMod, isBanned, group.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
func (g *Group) Update(name, tag string) (err error) {
|
||||
_, err = groupStmts.updateGroup.Exec(name, tag, g.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_ = Groups.Reload(g.ID)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Please don't pass arbitrary inputs to this method
|
||||
func (g *Group) UpdatePerms(perms map[string]bool) (err error) {
|
||||
pjson, err := json.Marshal(perms)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = groupStmts.updateGroupPerms.Exec(pjson, g.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return Groups.Reload(g.ID)
|
||||
Groups.Reload(group.ID)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Copy gives you a non-pointer concurrency safe copy of the group
|
||||
func (g *Group) Copy() Group {
|
||||
return *g
|
||||
}
|
||||
|
||||
func (g *Group) CopyPtr() (co *Group) {
|
||||
co = new(Group)
|
||||
*co = *g
|
||||
return co
|
||||
func (group *Group) Copy() Group {
|
||||
return *group
|
||||
}
|
||||
|
||||
// TODO: Replace this sorting mechanism with something a lot more efficient
|
||||
@ -103,11 +65,11 @@ func (g *Group) CopyPtr() (co *Group) {
|
||||
type SortGroup []*Group
|
||||
|
||||
func (sg SortGroup) Len() int {
|
||||
return len(sg)
|
||||
return len(sg)
|
||||
}
|
||||
func (sg SortGroup) Swap(i, j int) {
|
||||
sg[i], sg[j] = sg[j], sg[i]
|
||||
sg[i], sg[j] = sg[j], sg[i]
|
||||
}
|
||||
func (sg SortGroup) Less(i, j int) bool {
|
||||
return sg[i].ID < sg[j].ID
|
||||
return sg[i].ID < sg[j].ID
|
||||
}
|
||||
|
@ -7,369 +7,350 @@ import (
|
||||
"errors"
|
||||
"log"
|
||||
"sort"
|
||||
"strconv"
|
||||
"sync"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"../query_gen/lib"
|
||||
)
|
||||
|
||||
var Groups GroupStore
|
||||
|
||||
// ? - We could fallback onto the database when an item can't be found in the cache?
|
||||
type GroupStore interface {
|
||||
LoadGroups() error
|
||||
DirtyGet(id int) *Group
|
||||
Get(id int) (*Group, error)
|
||||
GetCopy(id int) (Group, error)
|
||||
Exists(id int) bool
|
||||
Create(name, tag string, isAdmin, isMod, isBanned bool) (id int, err error)
|
||||
GetAll() ([]*Group, error)
|
||||
GetRange(lower, higher int) ([]*Group, error)
|
||||
Reload(id int) error // ? - Should we move this to GroupCache? It might require us to do some unnecessary casting though
|
||||
Count() int
|
||||
LoadGroups() error
|
||||
DirtyGet(id int) *Group
|
||||
Get(id int) (*Group, error)
|
||||
GetCopy(id int) (Group, error)
|
||||
Exists(id int) bool
|
||||
Create(name string, tag string, isAdmin bool, isMod bool, isBanned bool) (int, error)
|
||||
GetAll() ([]*Group, error)
|
||||
GetRange(lower int, higher int) ([]*Group, error)
|
||||
Reload(id int) error // ? - Should we move this to GroupCache? It might require us to do some unnecessary casting though
|
||||
GlobalCount() int
|
||||
}
|
||||
|
||||
type GroupCache interface {
|
||||
CacheSet(g *Group) error
|
||||
SetCanSee(gid int, canSee []int) error
|
||||
CacheAdd(g *Group) error
|
||||
Length() int
|
||||
CacheSet(group *Group) error
|
||||
Length() int
|
||||
}
|
||||
|
||||
type MemoryGroupStore struct {
|
||||
groups map[int]*Group // TODO: Use a sync.Map instead of a map?
|
||||
groupCount int
|
||||
getAll *sql.Stmt
|
||||
get *sql.Stmt
|
||||
count *sql.Stmt
|
||||
userCount *sql.Stmt
|
||||
groups map[int]*Group // TODO: Use a sync.Map instead of a map?
|
||||
groupCount int
|
||||
getAll *sql.Stmt
|
||||
get *sql.Stmt
|
||||
count *sql.Stmt
|
||||
userCount *sql.Stmt
|
||||
|
||||
sync.RWMutex
|
||||
sync.RWMutex
|
||||
}
|
||||
|
||||
func NewMemoryGroupStore() (*MemoryGroupStore, error) {
|
||||
acc := qgen.NewAcc()
|
||||
ug := "users_groups"
|
||||
return &MemoryGroupStore{
|
||||
groups: make(map[int]*Group),
|
||||
groupCount: 0,
|
||||
getAll: acc.Select(ug).Columns("gid,name,permissions,plugin_perms,is_mod,is_admin,is_banned,tag").Prepare(),
|
||||
get: acc.Select(ug).Columns("name,permissions,plugin_perms,is_mod,is_admin,is_banned,tag").Where("gid=?").Prepare(),
|
||||
count: acc.Count(ug).Prepare(),
|
||||
userCount: acc.Count("users").Where("group=?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
acc := qgen.NewAcc()
|
||||
return &MemoryGroupStore{
|
||||
groups: make(map[int]*Group),
|
||||
groupCount: 0,
|
||||
getAll: acc.Select("users_groups").Columns("gid, name, permissions, plugin_perms, is_mod, is_admin, is_banned, tag").Prepare(),
|
||||
get: acc.Select("users_groups").Columns("name, permissions, plugin_perms, is_mod, is_admin, is_banned, tag").Where("gid = ?").Prepare(),
|
||||
count: acc.Count("users_groups").Prepare(),
|
||||
userCount: acc.Count("users").Where("group = ?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
// TODO: Move this query from the global stmt store into this store
|
||||
func (s *MemoryGroupStore) LoadGroups() error {
|
||||
s.Lock()
|
||||
defer s.Unlock()
|
||||
s.groups[0] = &Group{ID: 0, Name: "Unknown"}
|
||||
func (mgs *MemoryGroupStore) LoadGroups() error {
|
||||
mgs.Lock()
|
||||
defer mgs.Unlock()
|
||||
mgs.groups[0] = &Group{ID: 0, Name: "Unknown"}
|
||||
|
||||
rows, err := s.getAll.Query()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
rows, err := mgs.getAll.Query()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
i := 1
|
||||
for ; rows.Next(); i++ {
|
||||
g := &Group{ID: 0}
|
||||
err := rows.Scan(&g.ID, &g.Name, &g.PermissionsText, &g.PluginPermsText, &g.IsMod, &g.IsAdmin, &g.IsBanned, &g.Tag)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
i := 1
|
||||
for ; rows.Next(); i++ {
|
||||
group := &Group{ID: 0}
|
||||
err := rows.Scan(&group.ID, &group.Name, &group.PermissionsText, &group.PluginPermsText, &group.IsMod, &group.IsAdmin, &group.IsBanned, &group.Tag)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = s.initGroup(g)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
s.groups[g.ID] = g
|
||||
}
|
||||
if err = rows.Err(); err != nil {
|
||||
return err
|
||||
}
|
||||
s.groupCount = i
|
||||
err = mgs.initGroup(group)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
mgs.groups[group.ID] = group
|
||||
}
|
||||
err = rows.Err()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
mgs.groupCount = i
|
||||
|
||||
DebugLog("Binding the Not Loggedin Group")
|
||||
GuestPerms = s.dirtyGetUnsafe(6).Perms // ! Race?
|
||||
TopicListThaw.Thaw()
|
||||
return nil
|
||||
DebugLog("Binding the Not Loggedin Group")
|
||||
GuestPerms = mgs.dirtyGetUnsafe(6).Perms
|
||||
return nil
|
||||
}
|
||||
|
||||
// TODO: Hit the database when the item isn't in memory
|
||||
func (s *MemoryGroupStore) dirtyGetUnsafe(id int) *Group {
|
||||
group, ok := s.groups[id]
|
||||
if !ok {
|
||||
return &blankGroup
|
||||
}
|
||||
return group
|
||||
func (mgs *MemoryGroupStore) dirtyGetUnsafe(gid int) *Group {
|
||||
group, ok := mgs.groups[gid]
|
||||
if !ok {
|
||||
return &blankGroup
|
||||
}
|
||||
return group
|
||||
}
|
||||
|
||||
// TODO: Hit the database when the item isn't in memory
|
||||
func (s *MemoryGroupStore) DirtyGet(id int) *Group {
|
||||
s.RLock()
|
||||
group, ok := s.groups[id]
|
||||
s.RUnlock()
|
||||
if !ok {
|
||||
return &blankGroup
|
||||
}
|
||||
return group
|
||||
func (mgs *MemoryGroupStore) DirtyGet(gid int) *Group {
|
||||
mgs.RLock()
|
||||
group, ok := mgs.groups[gid]
|
||||
mgs.RUnlock()
|
||||
if !ok {
|
||||
return &blankGroup
|
||||
}
|
||||
return group
|
||||
}
|
||||
|
||||
// TODO: Hit the database when the item isn't in memory
|
||||
func (s *MemoryGroupStore) Get(id int) (*Group, error) {
|
||||
s.RLock()
|
||||
group, ok := s.groups[id]
|
||||
s.RUnlock()
|
||||
if !ok {
|
||||
return nil, ErrNoRows
|
||||
}
|
||||
return group, nil
|
||||
func (mgs *MemoryGroupStore) Get(gid int) (*Group, error) {
|
||||
mgs.RLock()
|
||||
group, ok := mgs.groups[gid]
|
||||
mgs.RUnlock()
|
||||
if !ok {
|
||||
return nil, ErrNoRows
|
||||
}
|
||||
return group, nil
|
||||
}
|
||||
|
||||
// TODO: Hit the database when the item isn't in memory
|
||||
func (s *MemoryGroupStore) GetCopy(id int) (Group, error) {
|
||||
s.RLock()
|
||||
group, ok := s.groups[id]
|
||||
s.RUnlock()
|
||||
if !ok {
|
||||
return blankGroup, ErrNoRows
|
||||
}
|
||||
return *group, nil
|
||||
func (mgs *MemoryGroupStore) GetCopy(gid int) (Group, error) {
|
||||
mgs.RLock()
|
||||
group, ok := mgs.groups[gid]
|
||||
mgs.RUnlock()
|
||||
if !ok {
|
||||
return blankGroup, ErrNoRows
|
||||
}
|
||||
return *group, nil
|
||||
}
|
||||
|
||||
func (s *MemoryGroupStore) Reload(id int) error {
|
||||
// TODO: Reload this data too
|
||||
g, e := s.Get(id)
|
||||
if e != nil {
|
||||
LogError(errors.New("can't get cansee data for group #" + strconv.Itoa(id)))
|
||||
return nil
|
||||
}
|
||||
canSee := g.CanSee
|
||||
func (mgs *MemoryGroupStore) Reload(id int) error {
|
||||
var group = &Group{ID: id}
|
||||
err := mgs.get.QueryRow(id).Scan(&group.Name, &group.PermissionsText, &group.PluginPermsText, &group.IsMod, &group.IsAdmin, &group.IsBanned, &group.Tag)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
g = &Group{ID: id, CanSee: canSee}
|
||||
e = s.get.QueryRow(id).Scan(&g.Name, &g.PermissionsText, &g.PluginPermsText, &g.IsMod, &g.IsAdmin, &g.IsBanned, &g.Tag)
|
||||
if e != nil {
|
||||
return e
|
||||
}
|
||||
if e = s.initGroup(g); e != nil {
|
||||
LogError(e)
|
||||
return nil
|
||||
}
|
||||
err = mgs.initGroup(group)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
}
|
||||
mgs.CacheSet(group)
|
||||
|
||||
s.CacheSet(g)
|
||||
TopicListThaw.Thaw()
|
||||
return nil
|
||||
err = RebuildGroupPermissions(id)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *MemoryGroupStore) initGroup(g *Group) error {
|
||||
e := json.Unmarshal(g.PermissionsText, &g.Perms)
|
||||
if e != nil {
|
||||
log.Printf("g: %+v\n", g)
|
||||
log.Print("bad group perms: ", g.PermissionsText)
|
||||
return e
|
||||
}
|
||||
DebugLogf(g.Name+": %+v\n", g.Perms)
|
||||
func (mgs *MemoryGroupStore) initGroup(group *Group) error {
|
||||
err := json.Unmarshal(group.PermissionsText, &group.Perms)
|
||||
if err != nil {
|
||||
log.Printf("group: %+v\n", group)
|
||||
log.Print("bad group perms: ", group.PermissionsText)
|
||||
return err
|
||||
}
|
||||
DebugLogf(group.Name+": %+v\n", group.Perms)
|
||||
|
||||
e = json.Unmarshal(g.PluginPermsText, &g.PluginPerms)
|
||||
if e != nil {
|
||||
log.Printf("g: %+v\n", g)
|
||||
log.Print("bad group plugin perms: ", g.PluginPermsText)
|
||||
return e
|
||||
}
|
||||
DebugLogf(g.Name+": %+v\n", g.PluginPerms)
|
||||
err = json.Unmarshal(group.PluginPermsText, &group.PluginPerms)
|
||||
if err != nil {
|
||||
log.Printf("group: %+v\n", group)
|
||||
log.Print("bad group plugin perms: ", group.PluginPermsText)
|
||||
return err
|
||||
}
|
||||
DebugLogf(group.Name+": %+v\n", group.PluginPerms)
|
||||
|
||||
//group.Perms.ExtData = make(map[string]bool)
|
||||
// TODO: Can we optimise the bit where this cascades down to the user now?
|
||||
if g.IsAdmin || g.IsMod {
|
||||
g.IsBanned = false
|
||||
}
|
||||
//group.Perms.ExtData = make(map[string]bool)
|
||||
// TODO: Can we optimise the bit where this cascades down to the user now?
|
||||
if group.IsAdmin || group.IsMod {
|
||||
group.IsBanned = false
|
||||
}
|
||||
|
||||
e = s.userCount.QueryRow(g.ID).Scan(&g.UserCount)
|
||||
if e != sql.ErrNoRows {
|
||||
return e
|
||||
}
|
||||
return nil
|
||||
err = mgs.userCount.QueryRow(group.ID).Scan(&group.UserCount)
|
||||
if err != sql.ErrNoRows {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *MemoryGroupStore) SetCanSee(gid int, canSee []int) error {
|
||||
s.Lock()
|
||||
group, ok := s.groups[gid]
|
||||
if !ok {
|
||||
s.Unlock()
|
||||
return nil
|
||||
}
|
||||
ngroup := &Group{}
|
||||
*ngroup = *group
|
||||
ngroup.CanSee = canSee
|
||||
s.groups[group.ID] = ngroup
|
||||
s.Unlock()
|
||||
return nil
|
||||
}
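SetCanSee never mutates the cached *Group in place: it copies the struct, swaps the CanSee slice on the copy, and stores the new pointer, so readers still holding the old pointer keep seeing a consistent value. A self-contained sketch of that copy-then-swap pattern:

package main

import (
	"fmt"
	"sync"
)

type group struct {
	ID     int
	CanSee []int
}

type groupCache struct {
	mu     sync.RWMutex
	groups map[int]*group
}

// setCanSee replaces the cached pointer with an updated copy instead of
// mutating the shared struct that other goroutines may be reading.
func (c *groupCache) setCanSee(gid int, canSee []int) {
	c.mu.Lock()
	defer c.mu.Unlock()
	old, ok := c.groups[gid]
	if !ok {
		return
	}
	ng := *old         // shallow copy of the struct
	ng.CanSee = canSee // only the copy gets the new slice
	c.groups[gid] = &ng
}

func main() {
	c := &groupCache{groups: map[int]*group{3: {ID: 3}}}
	c.setCanSee(3, []int{1, 2})
	fmt.Printf("%+v\n", *c.groups[3])
}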
|
||||
|
||||
func (s *MemoryGroupStore) CacheSet(g *Group) error {
|
||||
s.Lock()
|
||||
s.groups[g.ID] = g
|
||||
s.Unlock()
|
||||
return nil
|
||||
func (mgs *MemoryGroupStore) CacheSet(group *Group) error {
|
||||
mgs.Lock()
|
||||
mgs.groups[group.ID] = group
|
||||
mgs.Unlock()
|
||||
return nil
|
||||
}
|
||||
|
||||
// TODO: Hit the database when the item isn't in memory
|
||||
func (s *MemoryGroupStore) Exists(id int) bool {
|
||||
s.RLock()
|
||||
group, ok := s.groups[id]
|
||||
s.RUnlock()
|
||||
return ok && group.Name != ""
|
||||
func (mgs *MemoryGroupStore) Exists(gid int) bool {
|
||||
mgs.RLock()
|
||||
group, ok := mgs.groups[gid]
|
||||
mgs.RUnlock()
|
||||
return ok && group.Name != ""
|
||||
}
|
||||
|
||||
// ? Allow two groups with the same name?
|
||||
// TODO: Refactor this
|
||||
func (s *MemoryGroupStore) Create(name, tag string, isAdmin, isMod, isBanned bool) (gid int, err error) {
|
||||
permstr := "{}"
|
||||
tx, err := qgen.Builder.Begin()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer tx.Rollback()
|
||||
func (mgs *MemoryGroupStore) Create(name string, tag string, isAdmin bool, isMod bool, isBanned bool) (gid int, err error) {
|
||||
var permstr = "{}"
|
||||
tx, err := qgen.Builder.Begin()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
insertTx, err := qgen.Builder.SimpleInsertTx(tx, "users_groups", "name,tag,is_admin,is_mod,is_banned,permissions,plugin_perms", "?,?,?,?,?,?,'{}'")
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
res, err := insertTx.Exec(name, tag, isAdmin, isMod, isBanned, permstr)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
gid64, err := res.LastInsertId()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
gid = int(gid64)
|
||||
insertTx, err := qgen.Builder.SimpleInsertTx(tx, "users_groups", "name, tag, is_admin, is_mod, is_banned, permissions, plugin_perms", "?,?,?,?,?,?,'{}'")
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
res, err := insertTx.Exec(name, tag, isAdmin, isMod, isBanned, permstr)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
perms := BlankPerms
|
||||
blankIntList := []int{}
|
||||
pluginPerms := make(map[string]bool)
|
||||
pluginPermsBytes := []byte("{}")
|
||||
GetHookTable().Vhook("create_group_preappend", &pluginPerms, &pluginPermsBytes)
|
||||
gid64, err := res.LastInsertId()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
gid = int(gid64)
|
||||
|
||||
// Generate the forum permissions based on the presets...
|
||||
forums, err := Forums.GetAll()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
var perms = BlankPerms
|
||||
var blankIntList []int
|
||||
var pluginPerms = make(map[string]bool)
|
||||
var pluginPermsBytes = []byte("{}")
|
||||
RunVhook("create_group_preappend", &pluginPerms, &pluginPermsBytes)
|
||||
|
||||
presetSet := make(map[int]string)
|
||||
permSet := make(map[int]*ForumPerms)
|
||||
for _, f := range forums {
|
||||
var thePreset string
|
||||
switch {
|
||||
case isAdmin:
|
||||
thePreset = "admins"
|
||||
case isMod:
|
||||
thePreset = "staff"
|
||||
case isBanned:
|
||||
thePreset = "banned"
|
||||
default:
|
||||
thePreset = "members"
|
||||
}
|
||||
// Generate the forum permissions based on the presets...
|
||||
fdata, err := Forums.GetAll()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
permmap := PresetToPermmap(f.Preset)
|
||||
permItem := permmap[thePreset]
|
||||
permItem.Overrides = true
|
||||
var presetSet = make(map[int]string)
|
||||
var permSet = make(map[int]*ForumPerms)
|
||||
for _, forum := range fdata {
|
||||
var thePreset string
|
||||
switch {
|
||||
case isAdmin:
|
||||
thePreset = "admins"
|
||||
case isMod:
|
||||
thePreset = "staff"
|
||||
case isBanned:
|
||||
thePreset = "banned"
|
||||
default:
|
||||
thePreset = "members"
|
||||
}
|
||||
|
||||
permSet[f.ID] = permItem
|
||||
presetSet[f.ID] = f.Preset
|
||||
}
|
||||
permmap := PresetToPermmap(forum.Preset)
|
||||
permItem := permmap[thePreset]
|
||||
permItem.Overrides = true
|
||||
|
||||
err = ReplaceForumPermsForGroupTx(tx, gid, presetSet, permSet)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
err = tx.Commit()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
permSet[forum.ID] = permItem
|
||||
presetSet[forum.ID] = forum.Preset
|
||||
}
|
||||
|
||||
// TODO: Can we optimise the bit where this cascades down to the user now?
|
||||
if isAdmin || isMod {
|
||||
isBanned = false
|
||||
}
|
||||
err = ReplaceForumPermsForGroupTx(tx, gid, presetSet, permSet)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
s.CacheAdd(&Group{gid, name, isMod, isAdmin, isBanned, tag, perms, []byte(permstr), pluginPerms, pluginPermsBytes, blankIntList, 0})
|
||||
err = tx.Commit()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
TopicListThaw.Thaw()
|
||||
return gid, FPStore.ReloadAll()
|
||||
//return gid, TopicList.RebuildPermTree()
|
||||
// TODO: Can we optimise the bit where this cascades down to the user now?
|
||||
if isAdmin || isMod {
|
||||
isBanned = false
|
||||
}
|
||||
|
||||
mgs.Lock()
|
||||
mgs.groups[gid] = &Group{gid, name, isMod, isAdmin, isBanned, tag, perms, []byte(permstr), pluginPerms, pluginPermsBytes, blankIntList, 0}
|
||||
mgs.groupCount++
|
||||
mgs.Unlock()
|
||||
|
||||
err = FPStore.ReloadAll()
|
||||
if err != nil {
|
||||
return gid, err
|
||||
}
|
||||
err = TopicList.RebuildPermTree()
|
||||
if err != nil {
|
||||
return gid, err
|
||||
}
|
||||
|
||||
return gid, nil
|
||||
}
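Create derives each forum's permission preset for the new group from the three rank flags, with admin taking priority over mod and mod over banned. The same selection logic as a small standalone function:

package main

import "fmt"

// presetFor mirrors the switch in Create: admin > mod > banned > member.
func presetFor(isAdmin, isMod, isBanned bool) string {
	switch {
	case isAdmin:
		return "admins"
	case isMod:
		return "staff"
	case isBanned:
		return "banned"
	default:
		return "members"
	}
}

func main() {
	fmt.Println(presetFor(false, true, true)) // staff: mod outranks banned
}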
|
||||
|
||||
func (s *MemoryGroupStore) CacheAdd(g *Group) error {
|
||||
s.Lock()
|
||||
s.groups[g.ID] = g
|
||||
s.groupCount++
|
||||
s.Unlock()
|
||||
return nil
|
||||
func (mgs *MemoryGroupStore) GetAll() (results []*Group, err error) {
|
||||
var i int
|
||||
mgs.RLock()
|
||||
results = make([]*Group, len(mgs.groups))
|
||||
for _, group := range mgs.groups {
|
||||
results[i] = group
|
||||
i++
|
||||
}
|
||||
mgs.RUnlock()
|
||||
sort.Sort(SortGroup(results))
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (s *MemoryGroupStore) GetAll() (results []*Group, err error) {
|
||||
var i int
|
||||
s.RLock()
|
||||
results = make([]*Group, len(s.groups))
|
||||
for _, group := range s.groups {
|
||||
results[i] = group
|
||||
i++
|
||||
}
|
||||
s.RUnlock()
|
||||
sort.Sort(SortGroup(results))
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (s *MemoryGroupStore) GetAllMap() (map[int]*Group, error) {
|
||||
s.RLock()
|
||||
defer s.RUnlock()
|
||||
return s.groups, nil
|
||||
func (mgs *MemoryGroupStore) GetAllMap() (map[int]*Group, error) {
|
||||
mgs.RLock()
|
||||
defer mgs.RUnlock()
|
||||
return mgs.groups, nil
|
||||
}
|
||||
|
||||
// ? - Set the lower and higher numbers to 0 to remove the bounds
|
||||
// TODO: Might be a little slow right now, maybe we can cache the groups in a slice or break the map up into chunks
|
||||
func (s *MemoryGroupStore) GetRange(lower, higher int) (groups []*Group, err error) {
|
||||
if lower == 0 && higher == 0 {
|
||||
return s.GetAll()
|
||||
}
|
||||
func (mgs *MemoryGroupStore) GetRange(lower int, higher int) (groups []*Group, err error) {
|
||||
if lower == 0 && higher == 0 {
|
||||
return mgs.GetAll()
|
||||
}
|
||||
|
||||
// TODO: Simplify these four conditionals into two
|
||||
if lower == 0 {
|
||||
if higher < 0 {
|
||||
return nil, errors.New("higher may not be lower than 0")
|
||||
}
|
||||
} else if higher == 0 {
|
||||
if lower < 0 {
|
||||
return nil, errors.New("lower may not be lower than 0")
|
||||
}
|
||||
}
|
||||
// TODO: Simplify these four conditionals into two
|
||||
if lower == 0 {
|
||||
if higher < 0 {
|
||||
return nil, errors.New("higher may not be lower than 0")
|
||||
}
|
||||
} else if higher == 0 {
|
||||
if lower < 0 {
|
||||
return nil, errors.New("lower may not be lower than 0")
|
||||
}
|
||||
}
|
||||
|
||||
s.RLock()
|
||||
for gid, group := range s.groups {
|
||||
if gid >= lower && (gid <= higher || higher == 0) {
|
||||
groups = append(groups, group)
|
||||
}
|
||||
}
|
||||
s.RUnlock()
|
||||
sort.Sort(SortGroup(groups))
|
||||
mgs.RLock()
|
||||
for gid, group := range mgs.groups {
|
||||
if gid >= lower && (gid <= higher || higher == 0) {
|
||||
groups = append(groups, group)
|
||||
}
|
||||
}
|
||||
mgs.RUnlock()
|
||||
sort.Sort(SortGroup(groups))
|
||||
|
||||
return groups, nil
|
||||
return groups, nil
|
||||
}
|
||||
|
||||
func (s *MemoryGroupStore) Length() int {
|
||||
s.RLock()
|
||||
defer s.RUnlock()
|
||||
return s.groupCount
|
||||
func (mgs *MemoryGroupStore) Length() int {
|
||||
mgs.RLock()
|
||||
defer mgs.RUnlock()
|
||||
return mgs.groupCount
|
||||
}
|
||||
|
||||
func (s *MemoryGroupStore) Count() (count int) {
|
||||
err := s.count.QueryRow().Scan(&count)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
}
|
||||
return count
|
||||
func (mgs *MemoryGroupStore) GlobalCount() (count int) {
|
||||
err := mgs.count.QueryRow().Scan(&count)
|
||||
if err != nil {
|
||||
LogError(err)
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
@ -3,82 +3,78 @@ package common
|
||||
import (
|
||||
"database/sql"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"../query_gen/lib"
|
||||
)
|
||||
|
||||
var IPSearch IPSearcher
|
||||
|
||||
type IPSearcher interface {
|
||||
Lookup(ip string) (uids []int, e error)
|
||||
Lookup(ip string) (uids []int, err error)
|
||||
}
|
||||
|
||||
type DefaultIPSearcher struct {
|
||||
searchUsers *sql.Stmt
|
||||
searchTopics *sql.Stmt
|
||||
searchReplies *sql.Stmt
|
||||
searchUsersReplies *sql.Stmt
|
||||
searchUsers *sql.Stmt
|
||||
searchTopics *sql.Stmt
|
||||
searchReplies *sql.Stmt
|
||||
searchUsersReplies *sql.Stmt
|
||||
}
|
||||
|
||||
// NewDefaultIPSearcher gives you a new instance of DefaultIPSearcher
|
||||
func NewDefaultIPSearcher() (*DefaultIPSearcher, error) {
|
||||
acc := qgen.NewAcc()
|
||||
uu := "users"
|
||||
q := func(tbl string) *sql.Stmt {
|
||||
return acc.Select(uu).Columns("uid").InQ("uid", acc.Select(tbl).Columns("createdBy").Where("ip=?")).Prepare()
|
||||
}
|
||||
return &DefaultIPSearcher{
|
||||
searchUsers: acc.Select(uu).Columns("uid").Where("last_ip=? OR last_ip LIKE CONCAT('%-',?)").Prepare(),
|
||||
searchTopics: q("topics"),
|
||||
searchReplies: q("replies"),
|
||||
searchUsersReplies: q("users_replies"),
|
||||
}, acc.FirstError()
|
||||
acc := qgen.NewAcc()
|
||||
return &DefaultIPSearcher{
|
||||
searchUsers: acc.Select("users").Columns("uid").Where("last_ip = ?").Prepare(),
|
||||
searchTopics: acc.Select("users").Columns("uid").InQ("uid", acc.Select("topics").Columns("createdBy").Where("ipaddress = ?")).Prepare(),
|
||||
searchReplies: acc.Select("users").Columns("uid").InQ("uid", acc.Select("replies").Columns("createdBy").Where("ipaddress = ?")).Prepare(),
|
||||
searchUsersReplies: acc.Select("users").Columns("uid").InQ("uid", acc.Select("users_replies").Columns("createdBy").Where("ipaddress = ?")).Prepare(),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
func (s *DefaultIPSearcher) Lookup(ip string) (uids []int, e error) {
|
||||
var uid int
|
||||
reqUserList := make(map[int]bool)
|
||||
runQuery2 := func(rows *sql.Rows, e error) error {
|
||||
if e != nil {
|
||||
return e
|
||||
}
|
||||
defer rows.Close()
|
||||
func (searcher *DefaultIPSearcher) Lookup(ip string) (uids []int, err error) {
|
||||
var uid int
|
||||
var reqUserList = make(map[int]bool)
|
||||
|
||||
for rows.Next() {
|
||||
if e := rows.Scan(&uid); e != nil {
|
||||
return e
|
||||
}
|
||||
reqUserList[uid] = true
|
||||
}
|
||||
return rows.Err()
|
||||
}
|
||||
runQuery := func(stmt *sql.Stmt) error {
|
||||
return runQuery2(stmt.Query(ip))
|
||||
}
|
||||
var runQuery = func(stmt *sql.Stmt) error {
|
||||
rows, err := stmt.Query(ip)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
e = runQuery2(s.searchUsers.Query(ip, ip))
|
||||
if e != nil {
|
||||
return uids, e
|
||||
}
|
||||
e = runQuery(s.searchTopics)
|
||||
if e != nil {
|
||||
return uids, e
|
||||
}
|
||||
e = runQuery(s.searchReplies)
|
||||
if e != nil {
|
||||
return uids, e
|
||||
}
|
||||
e = runQuery(s.searchUsersReplies)
|
||||
if e != nil {
|
||||
return uids, e
|
||||
}
|
||||
for rows.Next() {
|
||||
err := rows.Scan(&uid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
reqUserList[uid] = true
|
||||
}
|
||||
return rows.Err()
|
||||
}
|
||||
|
||||
// Convert the user ID map to a slice, then bulk load the users
|
||||
uids = make([]int, len(reqUserList))
|
||||
var i int
|
||||
for userID := range reqUserList {
|
||||
uids[i] = userID
|
||||
i++
|
||||
}
|
||||
err = runQuery(searcher.searchUsers)
|
||||
if err != nil {
|
||||
return uids, err
|
||||
}
|
||||
err = runQuery(searcher.searchTopics)
|
||||
if err != nil {
|
||||
return uids, err
|
||||
}
|
||||
err = runQuery(searcher.searchReplies)
|
||||
if err != nil {
|
||||
return uids, err
|
||||
}
|
||||
err = runQuery(searcher.searchUsersReplies)
|
||||
if err != nil {
|
||||
return uids, err
|
||||
}
|
||||
|
||||
return uids, nil
|
||||
// Convert the user ID map to a slice, then bulk load the users
|
||||
uids = make([]int, len(reqUserList))
|
||||
var i int
|
||||
for userID := range reqUserList {
|
||||
uids[i] = userID
|
||||
i++
|
||||
}
|
||||
|
||||
return uids, nil
|
||||
}
|
||||
|
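Illustrative sketch, not part of the diff: the IPSearcher interface above exposes a single Lookup call, and both implementations deduplicate hits through the reqUserList map before returning. The wrapper name below is hypothetical; only IPSearch.Lookup comes from this file.

func usersForIP(ip string) ([]int, error) {
	// uids already contains each matching user once, gathered from the
	// users, topics, replies and users_replies queries shown above
	uids, err := IPSearch.Lookup(ip)
	if err != nil {
		return nil, err
	}
	return uids, nil
}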
103
common/likes.go
@ -1,103 +0,0 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
)
|
||||
|
||||
var Likes LikeStore
|
||||
|
||||
type LikeStore interface {
|
||||
BulkExists(ids []int, sentBy int, targetType string) ([]int, error)
|
||||
BulkExistsFunc(ids []int, sentBy int, targetType string, f func(int) error) error
|
||||
Delete(targetID int, targetType string) error
|
||||
Count() (count int)
|
||||
}
|
||||
|
||||
type DefaultLikeStore struct {
|
||||
count *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
singleExists *sql.Stmt
|
||||
}
|
||||
|
||||
func NewDefaultLikeStore(acc *qgen.Accumulator) (*DefaultLikeStore, error) {
|
||||
return &DefaultLikeStore{
|
||||
count: acc.Count("likes").Prepare(),
|
||||
delete: acc.Delete("likes").Where("targetItem=? AND targetType=?").Prepare(),
|
||||
singleExists: acc.Select("likes").Columns("targetItem").Where("sentBy=? AND targetType=? AND targetItem=?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
// TODO: Write a test for this
|
||||
func (s *DefaultLikeStore) BulkExists(ids []int, sentBy int, targetType string) (eids []int, e error) {
|
||||
if len(ids) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
var rows *sql.Rows
|
||||
if len(ids) == 1 {
|
||||
rows, e = s.singleExists.Query(sentBy, targetType, ids[0])
|
||||
} else {
|
||||
rows, e = qgen.NewAcc().Select("likes").Columns("targetItem").Where("sentBy=? AND targetType=?").In("targetItem", ids).Query(sentBy, targetType)
|
||||
}
|
||||
if e == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
} else if e != nil {
|
||||
return nil, e
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var id int
|
||||
for rows.Next() {
|
||||
if e := rows.Scan(&id); e != nil {
|
||||
return nil, e
|
||||
}
|
||||
eids = append(eids, id)
|
||||
}
|
||||
return eids, rows.Err()
|
||||
}
|
||||
|
||||
// TODO: Write a test for this
|
||||
func (s *DefaultLikeStore) BulkExistsFunc(ids []int, sentBy int, targetType string, f func(id int) error) (e error) {
|
||||
if len(ids) == 0 {
|
||||
return nil
|
||||
}
|
||||
var rows *sql.Rows
|
||||
if len(ids) == 1 {
|
||||
rows, e = s.singleExists.Query(sentBy, targetType, ids[0])
|
||||
} else {
|
||||
rows, e = qgen.NewAcc().Select("likes").Columns("targetItem").Where("sentBy=? AND targetType=?").In("targetItem", ids).Query(sentBy, targetType)
|
||||
}
|
||||
if e == sql.ErrNoRows {
|
||||
return nil
|
||||
} else if e != nil {
|
||||
return e
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var id int
|
||||
for rows.Next() {
|
||||
if e := rows.Scan(&id); e != nil {
|
||||
return e
|
||||
}
|
||||
if e := f(id); e != nil {
|
||||
return e
|
||||
}
|
||||
}
|
||||
return rows.Err()
|
||||
}
|
||||
|
||||
func (s *DefaultLikeStore) Delete(targetID int, targetType string) error {
|
||||
_, err := s.delete.Exec(targetID, targetType)
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO: Write a test for this
|
||||
// Count returns the total number of likes globally
|
||||
func (s *DefaultLikeStore) Count() (count int) {
|
||||
e := s.count.QueryRow().Scan(&count)
|
||||
if e != nil {
|
||||
LogError(e)
|
||||
}
|
||||
return count
|
||||
}
|
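Illustrative sketch, not part of the diff: BulkExistsFunc above streams each matching targetItem to a callback instead of building a slice. The helper and the "replies" target type string are assumptions for illustration.

func markLiked(rids []int, viewerID int) (map[int]bool, error) {
	liked := make(map[int]bool, len(rids))
	// the callback runs once per id the viewer has already liked
	err := Likes.BulkExistsFunc(rids, viewerID, "replies", func(id int) error {
		liked[id] = true
		return nil
	})
	return liked, err
}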
@ -3,28 +3,28 @@ package common
|
||||
import "sync"
|
||||
|
||||
type DefaultMenuItemStore struct {
|
||||
items map[int]MenuItem
|
||||
lock sync.RWMutex
|
||||
items map[int]MenuItem
|
||||
itemLock sync.RWMutex
|
||||
}
|
||||
|
||||
func NewDefaultMenuItemStore() *DefaultMenuItemStore {
|
||||
return &DefaultMenuItemStore{
|
||||
items: make(map[int]MenuItem),
|
||||
}
|
||||
return &DefaultMenuItemStore{
|
||||
items: make(map[int]MenuItem),
|
||||
}
|
||||
}
|
||||
|
||||
func (s *DefaultMenuItemStore) Add(i MenuItem) {
|
||||
s.lock.Lock()
|
||||
defer s.lock.Unlock()
|
||||
s.items[i.ID] = i
|
||||
func (store *DefaultMenuItemStore) Add(item MenuItem) {
|
||||
store.itemLock.Lock()
|
||||
defer store.itemLock.Unlock()
|
||||
store.items[item.ID] = item
|
||||
}
|
||||
|
||||
func (s *DefaultMenuItemStore) Get(id int) (MenuItem, error) {
|
||||
s.lock.RLock()
|
||||
item, ok := s.items[id]
|
||||
s.lock.RUnlock()
|
||||
if ok {
|
||||
return item, nil
|
||||
}
|
||||
return item, ErrNoRows
|
||||
func (store *DefaultMenuItemStore) Get(id int) (MenuItem, error) {
|
||||
store.itemLock.RLock()
|
||||
item, ok := store.items[id]
|
||||
store.itemLock.RUnlock()
|
||||
if ok {
|
||||
return item, nil
|
||||
}
|
||||
return item, ErrNoRows
|
||||
}
|
||||
|
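Illustrative sketch, not part of the diff: Get above returns ErrNoRows on a miss, so a caller can fall back cleanly. The helper and its default argument are hypothetical.

func getOrDefault(s *DefaultMenuItemStore, id int, def MenuItem) MenuItem {
	item, err := s.Get(id) // ErrNoRows signals a missing menu item
	if err != nil {
		return def
	}
	return item
}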
@ -5,71 +5,71 @@ import (
|
||||
"strconv"
|
||||
"sync/atomic"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"../query_gen/lib"
|
||||
)
|
||||
|
||||
var Menus *DefaultMenuStore
|
||||
|
||||
type DefaultMenuStore struct {
|
||||
menus map[int]*atomic.Value
|
||||
itemStore *DefaultMenuItemStore
|
||||
menus map[int]*atomic.Value
|
||||
itemStore *DefaultMenuItemStore
|
||||
}
|
||||
|
||||
func NewDefaultMenuStore() *DefaultMenuStore {
|
||||
return &DefaultMenuStore{
|
||||
make(map[int]*atomic.Value),
|
||||
NewDefaultMenuItemStore(),
|
||||
}
|
||||
return &DefaultMenuStore{
|
||||
make(map[int]*atomic.Value),
|
||||
NewDefaultMenuItemStore(),
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Add actual support for multiple menus
|
||||
func (s *DefaultMenuStore) GetAllMap() (out map[int]*MenuListHolder) {
|
||||
out = make(map[int]*MenuListHolder)
|
||||
for mid, atom := range s.menus {
|
||||
out[mid] = atom.Load().(*MenuListHolder)
|
||||
}
|
||||
return out
|
||||
func (store *DefaultMenuStore) GetAllMap() (out map[int]*MenuListHolder) {
|
||||
out = make(map[int]*MenuListHolder)
|
||||
for mid, atom := range store.menus {
|
||||
out[mid] = atom.Load().(*MenuListHolder)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func (s *DefaultMenuStore) Get(mid int) (*MenuListHolder, error) {
|
||||
aStore, ok := s.menus[mid]
|
||||
if ok {
|
||||
return aStore.Load().(*MenuListHolder), nil
|
||||
}
|
||||
return nil, ErrNoRows
|
||||
func (store *DefaultMenuStore) Get(mid int) (*MenuListHolder, error) {
|
||||
aStore, ok := store.menus[mid]
|
||||
if ok {
|
||||
return aStore.Load().(*MenuListHolder), nil
|
||||
}
|
||||
return nil, ErrNoRows
|
||||
}
|
||||
|
||||
func (s *DefaultMenuStore) Items(mid int) (mlist MenuItemList, err error) {
|
||||
err = qgen.NewAcc().Select("menu_items").Columns("miid,name,htmlID,cssClass,position,path,aria,tooltip,order,tmplName,guestOnly,memberOnly,staffOnly,adminOnly").Where("mid=" + strconv.Itoa(mid)).Orderby("order ASC").Each(func(rows *sql.Rows) error {
|
||||
i := MenuItem{MenuID: mid}
|
||||
err := rows.Scan(&i.ID, &i.Name, &i.HTMLID, &i.CSSClass, &i.Position, &i.Path, &i.Aria, &i.Tooltip, &i.Order, &i.TmplName, &i.GuestOnly, &i.MemberOnly, &i.SuperModOnly, &i.AdminOnly)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
s.itemStore.Add(i)
|
||||
mlist = append(mlist, i)
|
||||
return nil
|
||||
})
|
||||
return mlist, err
|
||||
func (store *DefaultMenuStore) Items(mid int) (mlist MenuItemList, err error) {
|
||||
err = qgen.NewAcc().Select("menu_items").Columns("miid, name, htmlID, cssClass, position, path, aria, tooltip, order, tmplName, guestOnly, memberOnly, staffOnly, adminOnly").Where("mid = " + strconv.Itoa(mid)).Orderby("order ASC").Each(func(rows *sql.Rows) error {
|
||||
var mitem = MenuItem{MenuID: mid}
|
||||
err := rows.Scan(&mitem.ID, &mitem.Name, &mitem.HTMLID, &mitem.CSSClass, &mitem.Position, &mitem.Path, &mitem.Aria, &mitem.Tooltip, &mitem.Order, &mitem.TmplName, &mitem.GuestOnly, &mitem.MemberOnly, &mitem.SuperModOnly, &mitem.AdminOnly)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
store.itemStore.Add(mitem)
|
||||
mlist = append(mlist, mitem)
|
||||
return nil
|
||||
})
|
||||
return mlist, err
|
||||
}
|
||||
|
||||
func (s *DefaultMenuStore) Load(mid int) error {
|
||||
mlist, err := s.Items(mid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
hold := &MenuListHolder{mid, mlist, make(map[int]menuTmpl)}
|
||||
err = hold.Preparse()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
func (store *DefaultMenuStore) Load(mid int) error {
|
||||
mlist, err := store.Items(mid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
hold := &MenuListHolder{mid, mlist, make(map[int]menuTmpl)}
|
||||
err = hold.Preparse()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
aStore := &atomic.Value{}
|
||||
aStore.Store(hold)
|
||||
s.menus[mid] = aStore
|
||||
return nil
|
||||
var aStore = &atomic.Value{}
|
||||
aStore.Store(hold)
|
||||
store.menus[mid] = aStore
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *DefaultMenuStore) ItemStore() *DefaultMenuItemStore {
|
||||
return s.itemStore
|
||||
func (store *DefaultMenuStore) ItemStore() *DefaultMenuItemStore {
|
||||
return store.itemStore
|
||||
}
|
||||
|
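Illustrative sketch, not part of the diff: Load populates the atomic.Value for a menu and Get hands back its MenuListHolder. The handler below is hypothetical, menu ID 1 is assumed to be the main navigation menu, and the three-argument Build only exists on the master side of the menus.go hunk further down (the conversations side takes just the writer and the user).

func renderTopMenu(w io.Writer, user *User, pathPrefix string) error {
	hold, err := Menus.Get(1) // assumes Menus.Load(1) ran at startup
	if err != nil {
		return err
	}
	return hold.Build(w, user, pathPrefix)
}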
808
common/menus.go
@ -7,507 +7,487 @@ import (
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"git.tuxpa.in/a/gosora/common/phrases"
|
||||
tmpl "git.tuxpa.in/a/gosora/common/templates"
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"../query_gen/lib"
|
||||
)
|
||||
|
||||
type MenuItemList []MenuItem
|
||||
|
||||
type MenuListHolder struct {
|
||||
MenuID int
|
||||
List MenuItemList
|
||||
Variations map[int]menuTmpl // 0 = Guest Menu, 1 = Member Menu, 2 = Super Mod Menu, 3 = Admin Menu
|
||||
}
|
||||
|
||||
type menuPath struct {
|
||||
Path string
|
||||
Index int
|
||||
MenuID int
|
||||
List MenuItemList
|
||||
Variations map[int]menuTmpl // 0 = Guest Menu, 1 = Member Menu, 2 = Super Mod Menu, 3 = Admin Menu
|
||||
}
|
||||
|
||||
type menuTmpl struct {
|
||||
RenderBuffer [][]byte
|
||||
VariableIndices []int
|
||||
PathMappings []menuPath
|
||||
RenderBuffer [][]byte
|
||||
VariableIndices []int
|
||||
}
|
||||
|
||||
type MenuItem struct {
|
||||
ID int
|
||||
MenuID int
|
||||
ID int
|
||||
MenuID int
|
||||
|
||||
Name string
|
||||
HTMLID string
|
||||
CSSClass string
|
||||
Position string
|
||||
Path string
|
||||
Aria string
|
||||
Tooltip string
|
||||
Order int
|
||||
TmplName string
|
||||
Name string
|
||||
HTMLID string
|
||||
CSSClass string
|
||||
Position string
|
||||
Path string
|
||||
Aria string
|
||||
Tooltip string
|
||||
Order int
|
||||
TmplName string
|
||||
|
||||
GuestOnly bool
|
||||
MemberOnly bool
|
||||
SuperModOnly bool
|
||||
AdminOnly bool
|
||||
GuestOnly bool
|
||||
MemberOnly bool
|
||||
SuperModOnly bool
|
||||
AdminOnly bool
|
||||
}
|
||||
|
||||
// TODO: Move the menu item stuff to it's own file
|
||||
type MenuItemStmts struct {
|
||||
update *sql.Stmt
|
||||
insert *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
updateOrder *sql.Stmt
|
||||
update *sql.Stmt
|
||||
insert *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
updateOrder *sql.Stmt
|
||||
}
|
||||
|
||||
var menuItemStmts MenuItemStmts
|
||||
|
||||
func init() {
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
mi := "menu_items"
|
||||
menuItemStmts = MenuItemStmts{
|
||||
update: acc.Update(mi).Set("name=?,htmlID=?,cssClass=?,position=?,path=?,aria=?,tooltip=?,tmplName=?,guestOnly=?,memberOnly=?,staffOnly=?,adminOnly=?").Where("miid=?").Prepare(),
|
||||
insert: acc.Insert(mi).Columns("mid, name, htmlID, cssClass, position, path, aria, tooltip, tmplName, guestOnly, memberOnly, staffOnly, adminOnly").Fields("?,?,?,?,?,?,?,?,?,?,?,?,?").Prepare(),
|
||||
delete: acc.Delete(mi).Where("miid=?").Prepare(),
|
||||
updateOrder: acc.Update(mi).Set("order=?").Where("miid=?").Prepare(),
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
menuItemStmts = MenuItemStmts{
|
||||
update: acc.Update("menu_items").Set("name = ?, htmlID = ?, cssClass = ?, position = ?, path = ?, aria = ?, tooltip = ?, tmplName = ?, guestOnly = ?, memberOnly = ?, staffOnly = ?, adminOnly = ?").Where("miid = ?").Prepare(),
|
||||
insert: acc.Insert("menu_items").Columns("mid, name, htmlID, cssClass, position, path, aria, tooltip, tmplName, guestOnly, memberOnly, staffOnly, adminOnly").Fields("?,?,?,?,?,?,?,?,?,?,?,?,?").Prepare(),
|
||||
delete: acc.Delete("menu_items").Where("miid = ?").Prepare(),
|
||||
updateOrder: acc.Update("menu_items").Set("order = ?").Where("miid = ?").Prepare(),
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
}
|
||||
|
||||
func (i MenuItem) Commit() error {
|
||||
_, e := menuItemStmts.update.Exec(i.Name, i.HTMLID, i.CSSClass, i.Position, i.Path, i.Aria, i.Tooltip, i.TmplName, i.GuestOnly, i.MemberOnly, i.SuperModOnly, i.AdminOnly, i.ID)
|
||||
Menus.Load(i.MenuID)
|
||||
return e
|
||||
func (item MenuItem) Commit() error {
|
||||
_, err := menuItemStmts.update.Exec(item.Name, item.HTMLID, item.CSSClass, item.Position, item.Path, item.Aria, item.Tooltip, item.TmplName, item.GuestOnly, item.MemberOnly, item.SuperModOnly, item.AdminOnly, item.ID)
|
||||
Menus.Load(item.MenuID)
|
||||
return err
|
||||
}
|
||||
|
||||
func (i MenuItem) Create() (int, error) {
|
||||
res, e := menuItemStmts.insert.Exec(i.MenuID, i.Name, i.HTMLID, i.CSSClass, i.Position, i.Path, i.Aria, i.Tooltip, i.TmplName, i.GuestOnly, i.MemberOnly, i.SuperModOnly, i.AdminOnly)
|
||||
if e != nil {
|
||||
return 0, e
|
||||
}
|
||||
Menus.Load(i.MenuID)
|
||||
func (item MenuItem) Create() (int, error) {
|
||||
res, err := menuItemStmts.insert.Exec(item.MenuID, item.Name, item.HTMLID, item.CSSClass, item.Position, item.Path, item.Aria, item.Tooltip, item.TmplName, item.GuestOnly, item.MemberOnly, item.SuperModOnly, item.AdminOnly)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
Menus.Load(item.MenuID)
|
||||
|
||||
miid64, e := res.LastInsertId()
|
||||
return int(miid64), e
|
||||
miid64, err := res.LastInsertId()
|
||||
return int(miid64), err
|
||||
}
|
||||
|
||||
func (i MenuItem) Delete() error {
|
||||
_, e := menuItemStmts.delete.Exec(i.ID)
|
||||
Menus.Load(i.MenuID)
|
||||
return e
|
||||
func (item MenuItem) Delete() error {
|
||||
_, err := menuItemStmts.delete.Exec(item.ID)
|
||||
Menus.Load(item.MenuID)
|
||||
return err
|
||||
}
|
||||
|
||||
func (h *MenuListHolder) LoadTmpl(name string) (t MenuTmpl, e error) {
|
||||
data, e := ioutil.ReadFile("./templates/" + name + ".html")
|
||||
if e != nil {
|
||||
return t, e
|
||||
}
|
||||
return h.Parse(name, []byte(tmpl.Minify(string(data)))), nil
|
||||
func (hold *MenuListHolder) LoadTmpl(name string) (menuTmpl MenuTmpl, err error) {
|
||||
data, err := ioutil.ReadFile("./templates/" + name + ".html")
|
||||
if err != nil {
|
||||
return menuTmpl, err
|
||||
}
|
||||
return hold.Parse(name, data), nil
|
||||
}
|
||||
|
||||
// TODO: Make this atomic, maybe with a transaction or store the order on the menu itself?
|
||||
func (h *MenuListHolder) UpdateOrder(updateMap map[int]int) error {
|
||||
for miid, order := range updateMap {
|
||||
_, e := menuItemStmts.updateOrder.Exec(order, miid)
|
||||
if e != nil {
|
||||
return e
|
||||
}
|
||||
}
|
||||
Menus.Load(h.MenuID)
|
||||
return nil
|
||||
func (hold *MenuListHolder) UpdateOrder(updateMap map[int]int) error {
|
||||
for miid, order := range updateMap {
|
||||
_, err := menuItemStmts.updateOrder.Exec(order, miid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
Menus.Load(hold.MenuID)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (h *MenuListHolder) LoadTmpls() (tmpls map[string]MenuTmpl, e error) {
|
||||
tmpls = make(map[string]MenuTmpl)
|
||||
load := func(name string) error {
|
||||
menuTmpl, e := h.LoadTmpl(name)
|
||||
if e != nil {
|
||||
return e
|
||||
}
|
||||
tmpls[name] = menuTmpl
|
||||
return nil
|
||||
}
|
||||
e = load("menu_item")
|
||||
if e != nil {
|
||||
return tmpls, e
|
||||
}
|
||||
e = load("menu_alerts")
|
||||
return tmpls, e
|
||||
func (hold *MenuListHolder) LoadTmpls() (tmpls map[string]MenuTmpl, err error) {
|
||||
tmpls = make(map[string]MenuTmpl)
|
||||
var loadTmpl = func(name string) error {
|
||||
menuTmpl, err := hold.LoadTmpl(name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
tmpls[name] = menuTmpl
|
||||
return nil
|
||||
}
|
||||
err = loadTmpl("menu_item")
|
||||
if err != nil {
|
||||
return tmpls, err
|
||||
}
|
||||
err = loadTmpl("menu_alerts")
|
||||
return tmpls, err
|
||||
}
|
||||
|
||||
// TODO: Run this in main, sync ticks, when the phrase file changes (need to implement the sync for that first), and when the settings are changed
|
||||
func (h *MenuListHolder) Preparse() error {
|
||||
tmpls, err := h.LoadTmpls()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
func (hold *MenuListHolder) Preparse() error {
|
||||
tmpls, err := hold.LoadTmpls()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
addVariation := func(index int, callback func(i MenuItem) bool) {
|
||||
renderBuffer, variableIndices, pathList := h.Scan(tmpls, callback)
|
||||
h.Variations[index] = menuTmpl{renderBuffer, variableIndices, pathList}
|
||||
}
|
||||
var addVariation = func(index int, callback func(mitem MenuItem) bool) {
|
||||
renderBuffer, variableIndices := hold.Scan(tmpls, callback)
|
||||
hold.Variations[index] = menuTmpl{renderBuffer, variableIndices}
|
||||
//fmt.Print("renderBuffer: ")
|
||||
//menuDumpSlice(renderBuffer)
|
||||
//fmt.Printf("\nvariableIndices: %+v\n", variableIndices)
|
||||
}
|
||||
|
||||
// Guest Menu
|
||||
addVariation(0, func(i MenuItem) bool {
|
||||
return !i.MemberOnly
|
||||
})
|
||||
// Member Menu
|
||||
addVariation(1, func(i MenuItem) bool {
|
||||
return !i.SuperModOnly && !i.GuestOnly
|
||||
})
|
||||
// Super Mod Menu
|
||||
addVariation(2, func(i MenuItem) bool {
|
||||
return !i.AdminOnly && !i.GuestOnly
|
||||
})
|
||||
// Admin Menu
|
||||
addVariation(3, func(i MenuItem) bool {
|
||||
return !i.GuestOnly
|
||||
})
|
||||
return nil
|
||||
// Guest Menu
|
||||
addVariation(0, func(mitem MenuItem) bool {
|
||||
return !mitem.MemberOnly
|
||||
})
|
||||
// Member Menu
|
||||
addVariation(1, func(mitem MenuItem) bool {
|
||||
return !mitem.SuperModOnly && !mitem.GuestOnly
|
||||
})
|
||||
// Super Mod Menu
|
||||
addVariation(2, func(mitem MenuItem) bool {
|
||||
return !mitem.AdminOnly && !mitem.GuestOnly
|
||||
})
|
||||
// Admin Menu
|
||||
addVariation(3, func(mitem MenuItem) bool {
|
||||
return !mitem.GuestOnly
|
||||
})
|
||||
return nil
|
||||
}
|
||||
|
||||
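Restated for clarity, not part of the diff: the four addVariation callbacks above and the branch at the top of Build later in this file agree on the same user-to-variation mapping, summarised here as a small helper.

func variationIndex(u *User) int {
	switch {
	case !u.Loggedin:
		return 0 // Guest Menu
	case u.IsAdmin:
		return 3 // Admin Menu
	case u.IsSuperMod:
		return 2 // Super Mod Menu
	default:
		return 1 // Member Menu
	}
}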
func nextCharIs(tmplData []byte, i int, expects byte) bool {
|
||||
if len(tmplData) <= (i + 1) {
|
||||
return false
|
||||
}
|
||||
return tmplData[i+1] == expects
|
||||
if len(tmplData) <= (i + 1) {
|
||||
return false
|
||||
}
|
||||
return tmplData[i+1] == expects
|
||||
}
|
||||
|
||||
func peekNextChar(tmplData []byte, i int) byte {
|
||||
if len(tmplData) <= (i + 1) {
|
||||
return 0
|
||||
}
|
||||
return tmplData[i+1]
|
||||
if len(tmplData) <= (i + 1) {
|
||||
return 0
|
||||
}
|
||||
return tmplData[i+1]
|
||||
}
|
||||
|
||||
func skipUntilIfExists(tmplData []byte, i int, expects byte) (newI int, hasIt bool) {
|
||||
j := i
|
||||
for ; j < len(tmplData); j++ {
|
||||
if tmplData[j] == expects {
|
||||
return j, true
|
||||
}
|
||||
}
|
||||
return j, false
|
||||
}
|
||||
|
||||
func skipUntilIfExistsOrLine(tmplData []byte, i int, expects byte) (newI int, hasIt bool) {
|
||||
j := i
|
||||
for ; j < len(tmplData); j++ {
|
||||
if tmplData[j] == 10 {
|
||||
return j, false
|
||||
} else if tmplData[j] == expects {
|
||||
return j, true
|
||||
}
|
||||
}
|
||||
return j, false
|
||||
j := i
|
||||
for ; j < len(tmplData); j++ {
|
||||
if tmplData[j] == expects {
|
||||
return j, true
|
||||
}
|
||||
}
|
||||
return j, false
|
||||
}
|
||||
|
||||
func skipUntilCharsExist(tmplData []byte, i int, expects []byte) (newI int, hasIt bool) {
|
||||
j := i
|
||||
expectIndex := 0
|
||||
for ; j < len(tmplData) && expectIndex < len(expects); j++ {
|
||||
//fmt.Println("tmplData[j]: ", string(tmplData[j]))
|
||||
if tmplData[j] != expects[expectIndex] {
|
||||
return j, false
|
||||
}
|
||||
//fmt.Printf("found %+v at %d\n", string(expects[expectIndex]), expectIndex)
|
||||
expectIndex++
|
||||
}
|
||||
return j, true
|
||||
j := i
|
||||
expectIndex := 0
|
||||
for ; j < len(tmplData) && expectIndex < len(expects); j++ {
|
||||
//fmt.Println("tmplData[j]: ", string(tmplData[j]))
|
||||
if tmplData[j] != expects[expectIndex] {
|
||||
return j, false
|
||||
}
|
||||
//fmt.Printf("found %+v at %d\n", string(expects[expectIndex]), expectIndex)
|
||||
expectIndex++
|
||||
}
|
||||
return j, true
|
||||
}
|
||||
|
||||
func skipAllUntilCharsExist(tmplData []byte, i int, expects []byte) (newI int, hasIt bool) {
|
||||
j := i
|
||||
expectIndex := 0
|
||||
for ; j < len(tmplData) && expectIndex < len(expects); j++ {
|
||||
if tmplData[j] == expects[expectIndex] {
|
||||
//fmt.Printf("expects[expectIndex]: %+v - %d\n", string(expects[expectIndex]), expectIndex)
|
||||
expectIndex++
|
||||
if len(expects) <= expectIndex {
|
||||
break
|
||||
}
|
||||
} else {
|
||||
/*if expectIndex != 0 {
|
||||
fmt.Println("broke expectations")
|
||||
fmt.Println("expected: ", string(expects[expectIndex]))
|
||||
fmt.Println("got: ", string(tmplData[j]))
|
||||
fmt.Println("next: ", string(peekNextChar(tmplData, j)))
|
||||
fmt.Println("next: ", string(peekNextChar(tmplData, j+1)))
|
||||
fmt.Println("next: ", string(peekNextChar(tmplData, j+2)))
|
||||
fmt.Println("next: ", string(peekNextChar(tmplData, j+3)))
|
||||
}*/
|
||||
expectIndex = 0
|
||||
}
|
||||
}
|
||||
return j, len(expects) == expectIndex
|
||||
j := i
|
||||
expectIndex := 0
|
||||
//fmt.Printf("tmplData: %+v\n", string(tmplData))
|
||||
for ; j < len(tmplData) && expectIndex < len(expects); j++ {
|
||||
//fmt.Println("j: ", j)
|
||||
//fmt.Println("tmplData[j]: ", string(tmplData[j])+" ")
|
||||
if tmplData[j] == expects[expectIndex] {
|
||||
//fmt.Printf("expects[expectIndex]: %+v - %d\n", string(expects[expectIndex]), expectIndex)
|
||||
expectIndex++
|
||||
if len(expects) <= expectIndex {
|
||||
//fmt.Println("breaking")
|
||||
break
|
||||
}
|
||||
} else {
|
||||
/*if expectIndex != 0 {
|
||||
fmt.Println("broke expectations")
|
||||
fmt.Println("expected: ", string(expects[expectIndex]))
|
||||
fmt.Println("got: ", string(tmplData[j]))
|
||||
fmt.Println("next: ", string(peekNextChar(tmplData, j)))
|
||||
fmt.Println("next: ", string(peekNextChar(tmplData, j+1)))
|
||||
fmt.Println("next: ", string(peekNextChar(tmplData, j+2)))
|
||||
fmt.Println("next: ", string(peekNextChar(tmplData, j+3)))
|
||||
}*/
|
||||
expectIndex = 0
|
||||
}
|
||||
}
|
||||
//fmt.Println("len(expects): ", len(expects))
|
||||
//fmt.Println("expectIndex: ", expectIndex)
|
||||
return j, len(expects) == expectIndex
|
||||
}
|
||||
|
||||
type menuRenderItem struct {
|
||||
Type int // 0: text, 1: variable
|
||||
Index int
|
||||
Type int // 0: text, 1: variable
|
||||
Index int
|
||||
}
|
||||
|
||||
type MenuTmpl struct {
|
||||
Name string
|
||||
TextBuffer [][]byte
|
||||
VariableBuffer [][]byte
|
||||
RenderList []menuRenderItem
|
||||
Name string
|
||||
TextBuffer [][]byte
|
||||
VariableBuffer [][]byte
|
||||
RenderList []menuRenderItem
|
||||
}
|
||||
|
||||
func menuDumpSlice(outerSlice [][]byte) {
|
||||
for sliceID, slice := range outerSlice {
|
||||
fmt.Print(strconv.Itoa(sliceID) + ":[")
|
||||
for _, ch := range slice {
|
||||
fmt.Print(string(ch))
|
||||
}
|
||||
fmt.Print("] ")
|
||||
}
|
||||
for sliceID, slice := range outerSlice {
|
||||
fmt.Print(strconv.Itoa(sliceID) + ":[")
|
||||
for _, char := range slice {
|
||||
fmt.Print(string(char))
|
||||
}
|
||||
fmt.Print("] ")
|
||||
}
|
||||
}
|
||||
|
||||
func (h *MenuListHolder) Parse(name string, tmplData []byte) (menuTmpl MenuTmpl) {
|
||||
var textBuffer, variableBuffer [][]byte
|
||||
var renderList []menuRenderItem
|
||||
var subBuffer []byte
|
||||
func (hold *MenuListHolder) Parse(name string, tmplData []byte) (menuTmpl MenuTmpl) {
|
||||
//fmt.Println("tmplData: ", string(tmplData))
|
||||
var textBuffer, variableBuffer [][]byte
|
||||
var renderList []menuRenderItem
|
||||
var subBuffer []byte
|
||||
|
||||
// ? We only support simple properties on MenuItem right now
|
||||
addVariable := func(name []byte) {
|
||||
// TODO: Check if the subBuffer has any items or is empty
|
||||
textBuffer = append(textBuffer, subBuffer)
|
||||
subBuffer = nil
|
||||
// ? We only support simple properties on MenuItem right now
|
||||
var addVariable = func(name []byte) {
|
||||
//fmt.Println("appending subBuffer: ", string(subBuffer))
|
||||
// TODO: Check if the subBuffer has any items or is empty
|
||||
textBuffer = append(textBuffer, subBuffer)
|
||||
subBuffer = nil
|
||||
|
||||
variableBuffer = append(variableBuffer, name)
|
||||
renderList = append(renderList, menuRenderItem{0, len(textBuffer) - 1})
|
||||
renderList = append(renderList, menuRenderItem{1, len(variableBuffer) - 1})
|
||||
}
|
||||
//fmt.Println("adding variable: ", string(name))
|
||||
variableBuffer = append(variableBuffer, name)
|
||||
renderList = append(renderList, menuRenderItem{0, len(textBuffer) - 1})
|
||||
renderList = append(renderList, menuRenderItem{1, len(variableBuffer) - 1})
|
||||
}
|
||||
|
||||
tmplData = bytes.Replace(tmplData, []byte("{{"), []byte("{"), -1)
|
||||
tmplData = bytes.Replace(tmplData, []byte("}}"), []byte("}}"), -1)
|
||||
for i := 0; i < len(tmplData); i++ {
|
||||
char := tmplData[i]
|
||||
if char == '{' {
|
||||
dotIndex, hasDot := skipUntilIfExists(tmplData, i, '.')
|
||||
if !hasDot {
|
||||
// Template function style
|
||||
langIndex, hasChars := skipUntilCharsExist(tmplData, i+1, []byte("lang"))
|
||||
if hasChars {
|
||||
startIndex, hasStart := skipUntilIfExists(tmplData, langIndex, '"')
|
||||
endIndex, hasEnd := skipUntilIfExists(tmplData, startIndex+1, '"')
|
||||
if hasStart && hasEnd {
|
||||
fenceIndex, hasFence := skipUntilIfExists(tmplData, endIndex, '}')
|
||||
if !hasFence || !nextCharIs(tmplData, fenceIndex, '}') {
|
||||
break
|
||||
}
|
||||
//fmt.Println("tmplData[startIndex:endIndex]: ", tmplData[startIndex+1:endIndex])
|
||||
prefix := []byte("lang.")
|
||||
addVariable(append(prefix, tmplData[startIndex+1:endIndex]...))
|
||||
i = fenceIndex + 1
|
||||
continue
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
fenceIndex, hasFence := skipUntilIfExists(tmplData, dotIndex, '}')
|
||||
if !hasFence {
|
||||
break
|
||||
}
|
||||
addVariable(tmplData[dotIndex:fenceIndex])
|
||||
i = fenceIndex + 1
|
||||
continue
|
||||
}
|
||||
subBuffer = append(subBuffer, char)
|
||||
}
|
||||
if len(subBuffer) > 0 {
|
||||
// TODO: Have a property in renderList which holds the byte slice since variableBuffers and textBuffers have the same underlying implementation?
|
||||
textBuffer = append(textBuffer, subBuffer)
|
||||
renderList = append(renderList, menuRenderItem{0, len(textBuffer) - 1})
|
||||
}
|
||||
tmplData = bytes.Replace(tmplData, []byte("{{"), []byte("{"), -1)
|
||||
tmplData = bytes.Replace(tmplData, []byte("}}"), []byte("}}"), -1)
|
||||
for i := 0; i < len(tmplData); i++ {
|
||||
char := tmplData[i]
|
||||
if char == '{' {
|
||||
//fmt.Println("found open fence")
|
||||
dotIndex, hasDot := skipUntilIfExists(tmplData, i, '.')
|
||||
if !hasDot {
|
||||
//fmt.Println("no dot, assumed template function style")
|
||||
// Template function style
|
||||
langIndex, hasChars := skipUntilCharsExist(tmplData, i+1, []byte("lang"))
|
||||
if hasChars {
|
||||
startIndex, hasStart := skipUntilIfExists(tmplData, langIndex, '"')
|
||||
endIndex, hasEnd := skipUntilIfExists(tmplData, startIndex+1, '"')
|
||||
if hasStart && hasEnd {
|
||||
fenceIndex, hasFence := skipUntilIfExists(tmplData, endIndex, '}')
|
||||
if !hasFence || !nextCharIs(tmplData, fenceIndex, '}') {
|
||||
break
|
||||
}
|
||||
//fmt.Println("tmplData[startIndex:endIndex]: ", tmplData[startIndex+1:endIndex])
|
||||
prefix := []byte("lang.")
|
||||
addVariable(append(prefix, tmplData[startIndex+1:endIndex]...))
|
||||
i = fenceIndex + 1
|
||||
continue
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
fenceIndex, hasFence := skipUntilIfExists(tmplData, dotIndex, '}')
|
||||
if !hasFence {
|
||||
//fmt.Println("no end fence")
|
||||
break
|
||||
}
|
||||
addVariable(tmplData[dotIndex:fenceIndex])
|
||||
i = fenceIndex + 1
|
||||
continue
|
||||
}
|
||||
subBuffer = append(subBuffer, char)
|
||||
}
|
||||
if len(subBuffer) > 0 {
|
||||
// TODO: Have a property in renderList which holds the byte slice since variableBuffers and textBuffers have the same underlying implementation?
|
||||
textBuffer = append(textBuffer, subBuffer)
|
||||
renderList = append(renderList, menuRenderItem{0, len(textBuffer) - 1})
|
||||
}
|
||||
|
||||
return MenuTmpl{name, textBuffer, variableBuffer, renderList}
|
||||
//fmt.Println("name: ", name)
|
||||
//fmt.Print("textBuffer: ")
|
||||
//menuDumpSlice(textBuffer)
|
||||
//fmt.Print("\nvariableBuffer: ")
|
||||
//menuDumpSlice(variableBuffer)
|
||||
//fmt.Printf("\nrenderList: %+v\n", renderList)
|
||||
return MenuTmpl{name, textBuffer, variableBuffer, renderList}
|
||||
}
|
||||
|
||||
func (h *MenuListHolder) Scan(tmpls map[string]MenuTmpl, showItem func(i MenuItem) bool) (renderBuffer [][]byte, variableIndices []int, pathList []menuPath) {
|
||||
for _, mitem := range h.List {
|
||||
// Do we want this item in this variation of the menu?
|
||||
if !showItem(mitem) {
|
||||
continue
|
||||
}
|
||||
renderBuffer, variableIndices = h.ScanItem(tmpls, mitem, renderBuffer, variableIndices)
|
||||
pathList = append(pathList, menuPath{mitem.Path, len(renderBuffer) - 1})
|
||||
}
|
||||
|
||||
// TODO: Need more coalescing in the renderBuffer
|
||||
return renderBuffer, variableIndices, pathList
|
||||
func (hold *MenuListHolder) Scan(menuTmpls map[string]MenuTmpl, showItem func(mitem MenuItem) bool) (renderBuffer [][]byte, variableIndices []int) {
|
||||
for _, mitem := range hold.List {
|
||||
// Do we want this item in this variation of the menu?
|
||||
if !showItem(mitem) {
|
||||
continue
|
||||
}
|
||||
renderBuffer, variableIndices = hold.ScanItem(menuTmpls, mitem, renderBuffer, variableIndices)
|
||||
}
|
||||
// TODO: Need more coalescing in the renderBuffer
|
||||
return renderBuffer, variableIndices
|
||||
}
|
||||
|
||||
// Note: This doesn't do a visibility check like hold.Scan() does
|
||||
func (h *MenuListHolder) ScanItem(tmpls map[string]MenuTmpl, mitem MenuItem, renderBuffer [][]byte, variableIndices []int) ([][]byte, []int) {
|
||||
menuTmpl, ok := tmpls[mitem.TmplName]
|
||||
if !ok {
|
||||
menuTmpl = tmpls["menu_item"]
|
||||
}
|
||||
func (hold *MenuListHolder) ScanItem(menuTmpls map[string]MenuTmpl, mitem MenuItem, renderBuffer [][]byte, variableIndices []int) ([][]byte, []int) {
|
||||
menuTmpl, ok := menuTmpls[mitem.TmplName]
|
||||
if !ok {
|
||||
menuTmpl = menuTmpls["menu_item"]
|
||||
}
|
||||
|
||||
for _, renderItem := range menuTmpl.RenderList {
|
||||
if renderItem.Type == 0 {
|
||||
renderBuffer = append(renderBuffer, menuTmpl.TextBuffer[renderItem.Index])
|
||||
continue
|
||||
}
|
||||
//fmt.Println("menuTmpl: ", menuTmpl)
|
||||
for _, renderItem := range menuTmpl.RenderList {
|
||||
if renderItem.Type == 0 {
|
||||
renderBuffer = append(renderBuffer, menuTmpl.TextBuffer[renderItem.Index])
|
||||
continue
|
||||
}
|
||||
|
||||
variable := menuTmpl.VariableBuffer[renderItem.Index]
|
||||
dotAt, hasDot := skipUntilIfExists(variable, 0, '.')
|
||||
if !hasDot {
|
||||
continue
|
||||
}
|
||||
variable := menuTmpl.VariableBuffer[renderItem.Index]
|
||||
//fmt.Println("initial variable: ", string(variable))
|
||||
dotAt, hasDot := skipUntilIfExists(variable, 0, '.')
|
||||
if !hasDot {
|
||||
//fmt.Println("no dot")
|
||||
continue
|
||||
}
|
||||
|
||||
if bytes.Equal(variable[:dotAt], []byte("lang")) {
|
||||
renderBuffer = append(renderBuffer, []byte(phrases.GetTmplPhrase(string(bytes.TrimPrefix(variable[dotAt:], []byte("."))))))
|
||||
continue
|
||||
}
|
||||
if bytes.Equal(variable[:dotAt], []byte("lang")) {
|
||||
//fmt.Println("lang: ", string(bytes.TrimPrefix(variable[dotAt:], []byte("."))))
|
||||
renderBuffer = append(renderBuffer, []byte(GetTmplPhrase(string(bytes.TrimPrefix(variable[dotAt:], []byte("."))))))
|
||||
} else {
|
||||
var renderItem []byte
|
||||
switch string(variable) {
|
||||
case ".ID":
|
||||
renderItem = []byte(strconv.Itoa(mitem.ID))
|
||||
case ".Name":
|
||||
renderItem = []byte(mitem.Name)
|
||||
case ".HTMLID":
|
||||
renderItem = []byte(mitem.HTMLID)
|
||||
case ".CSSClass":
|
||||
renderItem = []byte(mitem.CSSClass)
|
||||
case ".Position":
|
||||
renderItem = []byte(mitem.Position)
|
||||
case ".Path":
|
||||
renderItem = []byte(mitem.Path)
|
||||
case ".Aria":
|
||||
renderItem = []byte(mitem.Aria)
|
||||
case ".Tooltip":
|
||||
renderItem = []byte(mitem.Tooltip)
|
||||
}
|
||||
|
||||
var renderItem []byte
|
||||
switch string(variable) {
|
||||
case ".ID":
|
||||
renderItem = []byte(strconv.Itoa(mitem.ID))
|
||||
case ".Name":
|
||||
renderItem = []byte(mitem.Name)
|
||||
case ".HTMLID":
|
||||
renderItem = []byte(mitem.HTMLID)
|
||||
case ".CSSClass":
|
||||
renderItem = []byte(mitem.CSSClass)
|
||||
case ".Position":
|
||||
renderItem = []byte(mitem.Position)
|
||||
case ".Path":
|
||||
renderItem = []byte(mitem.Path)
|
||||
case ".Aria":
|
||||
renderItem = []byte(mitem.Aria)
|
||||
case ".Tooltip":
|
||||
renderItem = []byte(mitem.Tooltip)
|
||||
case ".CSSActive":
|
||||
renderItem = []byte("{dyn.active}")
|
||||
}
|
||||
_, hasInnerVar := skipUntilIfExists(renderItem, 0, '{')
|
||||
if hasInnerVar {
|
||||
//fmt.Println("inner var: ", string(renderItem))
|
||||
dotAt, hasDot := skipUntilIfExists(renderItem, 0, '.')
|
||||
endFence, hasEndFence := skipUntilIfExists(renderItem, dotAt, '}')
|
||||
if !hasDot || !hasEndFence || (endFence-dotAt) <= 1 {
|
||||
renderBuffer = append(renderBuffer, renderItem)
|
||||
variableIndices = append(variableIndices, len(renderBuffer)-1)
|
||||
continue
|
||||
}
|
||||
|
||||
_, hasInnerVar := skipUntilIfExists(renderItem, 0, '{')
|
||||
if hasInnerVar {
|
||||
DebugLog("inner var: ", string(renderItem))
|
||||
dotAt, hasDot := skipUntilIfExists(renderItem, 0, '.')
|
||||
endFence, hasEndFence := skipUntilIfExists(renderItem, dotAt, '}')
|
||||
if !hasDot || !hasEndFence || (endFence-dotAt) <= 1 {
|
||||
renderBuffer = append(renderBuffer, renderItem)
|
||||
variableIndices = append(variableIndices, len(renderBuffer)-1)
|
||||
continue
|
||||
}
|
||||
if bytes.Equal(renderItem[1:dotAt], []byte("lang")) {
|
||||
//fmt.Println("lang var: ", string(renderItem[dotAt+1:endFence]))
|
||||
renderBuffer = append(renderBuffer, []byte(GetTmplPhrase(string(renderItem[dotAt+1:endFence]))))
|
||||
} else {
|
||||
//fmt.Println("other var: ", string(variable[:dotAt]))
|
||||
if len(renderItem) > 0 {
|
||||
renderBuffer = append(renderBuffer, renderItem)
|
||||
variableIndices = append(variableIndices, len(renderBuffer)-1)
|
||||
}
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if bytes.Equal(renderItem[1:dotAt], []byte("lang")) {
|
||||
//fmt.Println("lang var: ", string(renderItem[dotAt+1:endFence]))
|
||||
renderBuffer = append(renderBuffer, []byte(phrases.GetTmplPhrase(string(renderItem[dotAt+1:endFence]))))
|
||||
} else {
|
||||
fmt.Println("other var: ", string(variable[:dotAt]))
|
||||
if len(renderItem) > 0 {
|
||||
renderBuffer = append(renderBuffer, renderItem)
|
||||
variableIndices = append(variableIndices, len(renderBuffer)-1)
|
||||
}
|
||||
}
|
||||
continue
|
||||
}
|
||||
if len(renderItem) > 0 {
|
||||
renderBuffer = append(renderBuffer, renderItem)
|
||||
}
|
||||
}
|
||||
return renderBuffer, variableIndices
|
||||
//fmt.Println("normal var: ", string(variable[:dotAt]))
|
||||
if len(renderItem) > 0 {
|
||||
renderBuffer = append(renderBuffer, renderItem)
|
||||
}
|
||||
}
|
||||
}
|
||||
return renderBuffer, variableIndices
|
||||
}
|
||||
|
||||
// TODO: Pre-render the lang stuff
|
||||
func (h *MenuListHolder) Build(w io.Writer, user *User, pathPrefix string) error {
|
||||
var mTmpl menuTmpl
|
||||
if !user.Loggedin {
|
||||
mTmpl = h.Variations[0]
|
||||
} else if user.IsAdmin {
|
||||
mTmpl = h.Variations[3]
|
||||
} else if user.IsSuperMod {
|
||||
mTmpl = h.Variations[2]
|
||||
} else {
|
||||
mTmpl = h.Variations[1]
|
||||
}
|
||||
if pathPrefix == "" {
|
||||
pathPrefix = Config.DefaultPath
|
||||
}
|
||||
func (hold *MenuListHolder) Build(w io.Writer, user *User) error {
|
||||
var mTmpl menuTmpl
|
||||
if !user.Loggedin {
|
||||
mTmpl = hold.Variations[0]
|
||||
} else if user.IsAdmin {
|
||||
mTmpl = hold.Variations[3]
|
||||
} else if user.IsSuperMod {
|
||||
mTmpl = hold.Variations[2]
|
||||
} else {
|
||||
mTmpl = hold.Variations[1]
|
||||
}
|
||||
|
||||
if len(mTmpl.VariableIndices) == 0 {
|
||||
for _, renderItem := range mTmpl.RenderBuffer {
|
||||
w.Write(renderItem)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
if len(mTmpl.VariableIndices) == 0 {
|
||||
//fmt.Println("no variable indices")
|
||||
for _, renderItem := range mTmpl.RenderBuffer {
|
||||
//fmt.Printf("renderItem: %+v\n", renderItem)
|
||||
w.Write(renderItem)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
nearIndex := 0
|
||||
for index, renderItem := range mTmpl.RenderBuffer {
|
||||
if index != mTmpl.VariableIndices[nearIndex] {
|
||||
w.Write(renderItem)
|
||||
continue
|
||||
}
|
||||
variable := renderItem
|
||||
// ? - I can probably remove this check now that I've kicked it upstream, or we could keep it here for safety's sake?
|
||||
if len(variable) == 0 {
|
||||
continue
|
||||
}
|
||||
var nearIndex = 0
|
||||
for index, renderItem := range mTmpl.RenderBuffer {
|
||||
if index != mTmpl.VariableIndices[nearIndex] {
|
||||
//fmt.Println("wrote text: ", string(renderItem))
|
||||
w.Write(renderItem)
|
||||
continue
|
||||
}
|
||||
|
||||
prevIndex := 0
|
||||
for i := 0; i < len(renderItem); i++ {
|
||||
fenceStart, hasFence := skipUntilIfExists(variable, i, '{')
|
||||
if !hasFence {
|
||||
continue
|
||||
}
|
||||
i = fenceStart
|
||||
fenceEnd, hasFence := skipUntilIfExists(variable, fenceStart, '}')
|
||||
if !hasFence {
|
||||
continue
|
||||
}
|
||||
i = fenceEnd
|
||||
dotAt, hasDot := skipUntilIfExists(variable, fenceStart, '.')
|
||||
if !hasDot {
|
||||
continue
|
||||
}
|
||||
|
||||
switch string(variable[fenceStart+1 : dotAt]) {
|
||||
case "me":
|
||||
w.Write(variable[prevIndex:fenceStart])
|
||||
switch string(variable[dotAt+1 : fenceEnd]) {
|
||||
case "Link":
|
||||
w.Write([]byte(user.Link))
|
||||
case "Session":
|
||||
w.Write([]byte(user.Session))
|
||||
}
|
||||
prevIndex = fenceEnd
|
||||
// TODO: Optimise this
|
||||
case "dyn":
|
||||
w.Write(variable[prevIndex:fenceStart])
|
||||
var pmi int
|
||||
for ii, pathItem := range mTmpl.PathMappings {
|
||||
pmi = ii
|
||||
if pathItem.Index > index {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if len(mTmpl.PathMappings) != 0 {
|
||||
path := mTmpl.PathMappings[pmi].Path
|
||||
if path == "" || path == "/" {
|
||||
path = Config.DefaultPath
|
||||
}
|
||||
if strings.HasPrefix(path, pathPrefix) {
|
||||
w.Write([]byte(" menu_active"))
|
||||
}
|
||||
}
|
||||
|
||||
prevIndex = fenceEnd
|
||||
}
|
||||
}
|
||||
|
||||
w.Write(variable[prevIndex : len(variable)-1])
|
||||
if len(mTmpl.VariableIndices) > (nearIndex + 1) {
|
||||
nearIndex++
|
||||
}
|
||||
}
|
||||
return nil
|
||||
//fmt.Println("variable: ", string(renderItem))
|
||||
variable := renderItem
|
||||
// ? - I can probably remove this check now that I've kicked it upstream, or we could keep it here for safety's sake?
|
||||
if len(variable) == 0 {
|
||||
continue
|
||||
}
|
||||
prevIndex := 0
|
||||
for i := 0; i < len(renderItem); i++ {
|
||||
fenceStart, hasFence := skipUntilIfExists(variable, i, '{')
|
||||
if !hasFence {
|
||||
continue
|
||||
}
|
||||
i = fenceStart
|
||||
fenceEnd, hasFence := skipUntilIfExists(variable, fenceStart, '}')
|
||||
if !hasFence {
|
||||
continue
|
||||
}
|
||||
i = fenceEnd
|
||||
dotAt, hasDot := skipUntilIfExists(variable, fenceStart, '.')
|
||||
if !hasDot {
|
||||
continue
|
||||
}
|
||||
//fmt.Println("checking me: ", string(variable[fenceStart+1:dotAt]))
|
||||
if bytes.Equal(variable[fenceStart+1:dotAt], []byte("me")) {
|
||||
//fmt.Println("maybe me variable")
|
||||
w.Write(variable[prevIndex:fenceStart])
|
||||
switch string(variable[dotAt+1 : fenceEnd]) {
|
||||
case "Link":
|
||||
w.Write([]byte(user.Link))
|
||||
case "Session":
|
||||
w.Write([]byte(user.Session))
|
||||
}
|
||||
prevIndex = fenceEnd
|
||||
}
|
||||
}
|
||||
//fmt.Println("prevIndex: ", prevIndex)
|
||||
//fmt.Println("len(variable)-1: ", len(variable)-1)
|
||||
w.Write(variable[prevIndex : len(variable)-1])
|
||||
if len(mTmpl.VariableIndices) > (nearIndex + 1) {
|
||||
nearIndex++
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
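Illustrative sketch, not part of the diff: the hand-rolled parser above only understands two variable forms, {{lang "..."}} for phrases and {{.Field}} for the simple MenuItem fields handled in ScanItem. The template string and the phrase name below are made up for illustration.

func exampleMenuItemTmpl(hold *MenuListHolder) MenuTmpl {
	src := []byte(`<a href="{{.Path}}" aria-label="{{lang "menu_topics_aria"}}">{{.Name}}</a>`)
	// Parse splits src into TextBuffer chunks plus VariableBuffer entries such
	// as ".Path", "lang.menu_topics_aria" and ".Name", stitched back together
	// through RenderList at render time
	return hold.Parse("menu_item_example", src)
}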
@ -1,61 +0,0 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
)
|
||||
|
||||
// MetaStore is a simple key-value store for the system to stash things in when needed
|
||||
type MetaStore interface {
|
||||
Get(name string) (val string, err error)
|
||||
Set(name, val string) error
|
||||
SetInt(name string, val int) error
|
||||
SetInt64(name string, val int64) error
|
||||
}
|
||||
|
||||
type DefaultMetaStore struct {
|
||||
get *sql.Stmt
|
||||
set *sql.Stmt
|
||||
add *sql.Stmt
|
||||
}
|
||||
|
||||
func NewDefaultMetaStore(acc *qgen.Accumulator) (*DefaultMetaStore, error) {
|
||||
t := "meta"
|
||||
m := &DefaultMetaStore{
|
||||
get: acc.Select(t).Columns("value").Where("name=?").Prepare(),
|
||||
set: acc.Update(t).Set("value=?").Where("name=?").Prepare(),
|
||||
add: acc.Insert(t).Columns("name,value").Fields("?,''").Prepare(),
|
||||
}
|
||||
return m, acc.FirstError()
|
||||
}
|
||||
|
||||
func (s *DefaultMetaStore) Get(name string) (val string, e error) {
|
||||
e = s.get.QueryRow(name).Scan(&val)
|
||||
return val, e
|
||||
}
|
||||
|
||||
// TODO: Use timestamped rows as a more robust method of ensuring data integrity
|
||||
func (s *DefaultMetaStore) setVal(name string, val interface{}) error {
|
||||
_, e := s.Get(name)
|
||||
if e == sql.ErrNoRows {
|
||||
_, e := s.add.Exec(name)
|
||||
if e != nil {
|
||||
return e
|
||||
}
|
||||
}
|
||||
_, e = s.set.Exec(val, name)
|
||||
return e
|
||||
}
|
||||
|
||||
func (s *DefaultMetaStore) Set(name, val string) error {
|
||||
return s.setVal(name, val)
|
||||
}
|
||||
|
||||
func (s *DefaultMetaStore) SetInt(name string, val int) error {
|
||||
return s.setVal(name, val)
|
||||
}
|
||||
|
||||
func (s *DefaultMetaStore) SetInt64(name string, val int64) error {
|
||||
return s.setVal(name, val)
|
||||
}
|
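Illustrative sketch, not part of the diff: setVal above creates a missing row on sql.ErrNoRows and then updates it, so callers can treat Set, SetInt and SetInt64 as upserts. The function and the key name are hypothetical; it assumes it lives in the common package, where database/sql is already imported.

func touchLastRun(m MetaStore, now int64) (string, error) {
	prev, err := m.Get("last_daily_run") // made-up key
	if err == sql.ErrNoRows {
		err = nil // a missing key is fine, the first Set will create it
	}
	if err != nil {
		return "", err
	}
	return prev, m.SetInt64("last_daily_run", now)
}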
@ -5,97 +5,97 @@ import (
|
||||
"errors"
|
||||
"strings"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"../query_gen/lib"
|
||||
)
|
||||
|
||||
var MFAstore MFAStore
|
||||
var ErrMFAScratchIndexOutOfBounds = errors.New("That MFA scratch index is out of bounds")
|
||||
|
||||
type MFAItemStmts struct {
|
||||
update *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
update *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
}
|
||||
|
||||
var mfaItemStmts MFAItemStmts
|
||||
|
||||
func init() {
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
mfaItemStmts = MFAItemStmts{
|
||||
update: acc.Update("users_2fa_keys").Set("scratch1=?,scratch2=?,scratch3=?,scratch4=?,scratch5=?,scratch6=?,scratch7=?,scratch8=?").Where("uid=?").Prepare(),
|
||||
delete: acc.Delete("users_2fa_keys").Where("uid=?").Prepare(),
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
mfaItemStmts = MFAItemStmts{
|
||||
update: acc.Update("users_2fa_keys").Set("scratch1 = ?, scratch2, scratch3 = ?, scratch3 = ?, scratch4 = ?, scratch5 = ?, scratch6 = ?, scratch7 = ?, scratch8 = ?").Where("uid = ?").Prepare(),
|
||||
delete: acc.Delete("users_2fa_keys").Where("uid = ?").Prepare(),
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
}
|
||||
|
||||
type MFAItem struct {
|
||||
UID int
|
||||
Secret string
|
||||
Scratch []string
|
||||
UID int
|
||||
Secret string
|
||||
Scratch []string
|
||||
}
|
||||
|
||||
func (i *MFAItem) BurnScratch(index int) error {
|
||||
if index < 0 || len(i.Scratch) <= index {
|
||||
return ErrMFAScratchIndexOutOfBounds
|
||||
}
|
||||
newScratch, err := mfaCreateScratch()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
i.Scratch[index] = newScratch
|
||||
func (item *MFAItem) BurnScratch(index int) error {
|
||||
if index < 0 || len(item.Scratch) <= index {
|
||||
return ErrMFAScratchIndexOutOfBounds
|
||||
}
|
||||
newScratch, err := mfaCreateScratch()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
item.Scratch[index] = newScratch
|
||||
|
||||
_, err = mfaItemStmts.update.Exec(i.Scratch[0], i.Scratch[1], i.Scratch[2], i.Scratch[3], i.Scratch[4], i.Scratch[5], i.Scratch[6], i.Scratch[7], i.UID)
|
||||
return err
|
||||
_, err = mfaItemStmts.update.Exec(item.Scratch[0], item.Scratch[1], item.Scratch[2], item.Scratch[3], item.Scratch[4], item.Scratch[5], item.Scratch[6], item.Scratch[7], item.UID)
|
||||
return err
|
||||
}
|
||||
|
||||
func (i *MFAItem) Delete() error {
|
||||
_, err := mfaItemStmts.delete.Exec(i.UID)
|
||||
return err
|
||||
func (item *MFAItem) Delete() error {
|
||||
_, err := mfaItemStmts.delete.Exec(item.UID)
|
||||
return err
|
||||
}
|
||||
|
||||
func mfaCreateScratch() (string, error) {
|
||||
code, err := GenerateStd32SafeString(8)
|
||||
return strings.Replace(code, "=", "", -1), err
|
||||
code, err := GenerateStd32SafeString(8)
|
||||
return strings.Replace(code, "=", "", -1), err
|
||||
}
|
||||
|
||||
type MFAStore interface {
|
||||
Get(id int) (*MFAItem, error)
|
||||
Create(secret string, uid int) (err error)
|
||||
Get(id int) (*MFAItem, error)
|
||||
Create(secret string, uid int) (err error)
|
||||
}
|
||||
|
||||
type SQLMFAStore struct {
|
||||
get *sql.Stmt
|
||||
create *sql.Stmt
|
||||
get *sql.Stmt
|
||||
create *sql.Stmt
|
||||
}
|
||||
|
||||
func NewSQLMFAStore(acc *qgen.Accumulator) (*SQLMFAStore, error) {
|
||||
return &SQLMFAStore{
|
||||
get: acc.Select("users_2fa_keys").Columns("secret,scratch1,scratch2,scratch3,scratch4,scratch5,scratch6,scratch7,scratch8").Where("uid=?").Prepare(),
|
||||
create: acc.Insert("users_2fa_keys").Columns("uid,secret,scratch1,scratch2,scratch3,scratch4,scratch5,scratch6,scratch7,scratch8,createdAt").Fields("?,?,?,?,?,?,?,?,?,?,UTC_TIMESTAMP()").Prepare(),
|
||||
}, acc.FirstError()
|
||||
return &SQLMFAStore{
|
||||
get: acc.Select("users_2fa_keys").Columns("secret, scratch1, scratch2, scratch3, scratch4, scratch5, scratch6, scratch7, scratch8").Where("uid = ?").Prepare(),
|
||||
create: acc.Insert("users_2fa_keys").Columns("uid, secret, scratch1, scratch2, scratch3, scratch4, scratch5, scratch6, scratch7, scratch8, createdAt").Fields("?,?,?,?,?,?,?,?,?,?,UTC_TIMESTAMP()").Prepare(),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
// TODO: Write a test for this
|
||||
func (s *SQLMFAStore) Get(id int) (*MFAItem, error) {
|
||||
i := MFAItem{UID: id, Scratch: make([]string, 8)}
|
||||
err := s.get.QueryRow(id).Scan(&i.Secret, &i.Scratch[0], &i.Scratch[1], &i.Scratch[2], &i.Scratch[3], &i.Scratch[4], &i.Scratch[5], &i.Scratch[6], &i.Scratch[7])
|
||||
return &i, err
|
||||
func (store *SQLMFAStore) Get(id int) (*MFAItem, error) {
|
||||
item := MFAItem{UID: id, Scratch: make([]string, 8)}
|
||||
err := store.get.QueryRow(id).Scan(&item.Secret, &item.Scratch[0], &item.Scratch[1], &item.Scratch[2], &item.Scratch[3], &item.Scratch[4], &item.Scratch[5], &item.Scratch[6], &item.Scratch[7])
|
||||
return &item, err
|
||||
|
||||
}
|
||||
|
||||
// TODO: Write a test for this
|
||||
func (s *SQLMFAStore) Create(secret string, uid int) (err error) {
|
||||
params := make([]interface{}, 10)
|
||||
params[0] = uid
|
||||
params[1] = secret
|
||||
for i := 2; i < len(params); i++ {
|
||||
code, err := mfaCreateScratch()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
params[i] = code
|
||||
}
|
||||
func (store *SQLMFAStore) Create(secret string, uid int) (err error) {
|
||||
var params = make([]interface{}, 10)
|
||||
params[0] = uid
|
||||
params[1] = secret
|
||||
for i := 2; i < len(params); i++ {
|
||||
code, err := mfaCreateScratch()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
params[i] = code
|
||||
}
|
||||
|
||||
_, err = s.create.Exec(params...)
|
||||
return err
|
||||
_, err = store.create.Exec(params...)
|
||||
return err
|
||||
}
|
||||
|
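Illustrative sketch, not part of the diff: a typical scratch-code flow with the store above looks up the user's MFAItem, matches the submitted code against one of the eight Scratch slots, and burns that slot so it cannot be reused. tryScratchCode is hypothetical; Get, Scratch and BurnScratch are taken from this hunk.

func tryScratchCode(uid int, code string) (bool, error) {
	item, err := MFAstore.Get(uid)
	if err != nil {
		return false, err
	}
	for i, scratch := range item.Scratch {
		if scratch == code {
			// BurnScratch swaps the used slot for a freshly generated code
			return true, item.BurnScratch(i)
		}
	}
	return false, nil
}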
@ -1,227 +1,93 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"time"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
)
|
||||
import "database/sql"
|
||||
import "../query_gen/lib"
|
||||
|
||||
var RegLogs RegLogStore
|
||||
var LoginLogs LoginLogStore
|
||||
|
||||
type RegLogItem struct {
|
||||
ID int
|
||||
Username string
|
||||
Email string
|
||||
FailureReason string
|
||||
Success bool
|
||||
IP string
|
||||
DoneAt string
|
||||
ID int
|
||||
Username string
|
||||
Email string
|
||||
FailureReason string
|
||||
Success bool
|
||||
IPAddress string
|
||||
DoneAt string
|
||||
}
|
||||
|
||||
type RegLogStmts struct {
|
||||
update *sql.Stmt
|
||||
create *sql.Stmt
|
||||
update *sql.Stmt
|
||||
create *sql.Stmt
|
||||
}
|
||||
|
||||
var regLogStmts RegLogStmts
|
||||
|
||||
func init() {
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
rl := "registration_logs"
|
||||
regLogStmts = RegLogStmts{
|
||||
update: acc.Update(rl).Set("username=?,email=?,failureReason=?,success=?,doneAt=?").Where("rlid=?").Prepare(),
|
||||
create: acc.Insert(rl).Columns("username,email,failureReason,success,ipaddress,doneAt").Fields("?,?,?,?,?,UTC_TIMESTAMP()").Prepare(),
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
regLogStmts = RegLogStmts{
|
||||
update: acc.Update("registration_logs").Set("username = ?, email = ?, failureReason = ?, success = ?").Where("rlid = ?").Prepare(),
|
||||
create: acc.Insert("registration_logs").Columns("username, email, failureReason, success, ipaddress, doneAt").Fields("?,?,?,?,?,UTC_TIMESTAMP()").Prepare(),
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
}
|
||||
|
||||
// TODO: Reload this item in the store, probably doesn't matter right now, but it might when we start caching this stuff in memory
|
||||
// ! Retroactive updates of date are not permitted for integrity reasons
|
||||
// TODO: Do we even use this anymore or can we just make the logs immutable (except for deletes) for simplicity sake?
|
||||
func (l *RegLogItem) Commit() error {
|
||||
_, e := regLogStmts.update.Exec(l.Username, l.Email, l.FailureReason, l.Success, l.DoneAt, l.ID)
|
||||
return e
|
||||
func (log *RegLogItem) Commit() error {
|
||||
_, err := regLogStmts.update.Exec(log.Username, log.Email, log.FailureReason, log.Success, log.ID)
|
||||
return err
|
||||
}
|
||||
|
||||
func (l *RegLogItem) Create() (id int, e error) {
|
||||
id, e = Createf(regLogStmts.create, l.Username, l.Email, l.FailureReason, l.Success, l.IP)
|
||||
l.ID = id
|
||||
return l.ID, e
|
||||
func (log *RegLogItem) Create() (id int, err error) {
|
||||
res, err := regLogStmts.create.Exec(log.Username, log.Email, log.FailureReason, log.Success, log.IPAddress)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
id64, err := res.LastInsertId()
|
||||
log.ID = int(id64)
|
||||
return log.ID, err
|
||||
}
|
||||
|
||||
type RegLogStore interface {
	Count() (count int)
	GetOffset(offset, perPage int) (logs []RegLogItem, err error)
	Purge() error

	DeleteOlderThanDays(days int) error
	GlobalCount() (logCount int)
}

type SQLRegLogStore struct {
	count     *sql.Stmt
	getOffset *sql.Stmt
	purge     *sql.Stmt

	deleteOlderThanDays *sql.Stmt
}

func NewRegLogStore(acc *qgen.Accumulator) (*SQLRegLogStore, error) {
	rl := "registration_logs"
	return &SQLRegLogStore{
		count:     acc.Count(rl).Prepare(),
		getOffset: acc.Select(rl).Columns("rlid,username,email,failureReason,success,ipaddress,doneAt").Orderby("doneAt DESC").Limit("?,?").Prepare(),
		purge:     acc.Purge(rl),

		deleteOlderThanDays: acc.Delete(rl).DateOlderThanQ("doneAt", "day").Prepare(),
	}, acc.FirstError()
}

func (s *SQLRegLogStore) Count() (count int) {
	return Count(s.count)
}

func (store *SQLRegLogStore) GlobalCount() (logCount int) {
	err := store.count.QueryRow().Scan(&logCount)
	if err != nil {
		LogError(err)
	}
	return logCount
}

func (s *SQLRegLogStore) GetOffset(offset, perPage int) (logs []RegLogItem, e error) {
	rows, e := s.getOffset.Query(offset, perPage)
	if e != nil {
		return logs, e
	}
	defer rows.Close()

	for rows.Next() {
		var l RegLogItem
		var doneAt time.Time
		e := rows.Scan(&l.ID, &l.Username, &l.Email, &l.FailureReason, &l.Success, &l.IP, &doneAt)
		if e != nil {
			return logs, e
		}
		l.DoneAt = doneAt.Format("2006-01-02 15:04:05")
		logs = append(logs, l)
	}
	return logs, rows.Err()
}

func (store *SQLRegLogStore) GetOffset(offset int, perPage int) (logs []RegLogItem, err error) {
	rows, err := store.getOffset.Query(offset, perPage)
	if err != nil {
		return logs, err
	}
	defer rows.Close()

	for rows.Next() {
		var log RegLogItem
		err := rows.Scan(&log.ID, &log.Username, &log.Email, &log.FailureReason, &log.Success, &log.IPAddress, &log.DoneAt)
		if err != nil {
			return logs, err
		}
		logs = append(logs, log)
	}
	return logs, rows.Err()
}

func (s *SQLRegLogStore) DeleteOlderThanDays(days int) error {
	_, e := s.deleteOlderThanDays.Exec(days)
	return e
}

// Delete all registration logs
func (s *SQLRegLogStore) Purge() error {
	_, e := s.purge.Exec()
	return e
}

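A sketch of how a control-panel handler might page through these logs, assuming a package-level RegLogs store instance of the RegLogStore type above; the helper name and the offset arithmetic are illustrative.

// listRegLogPage is a hypothetical helper that fetches one page of
// registration logs via the store's GetOffset method.
func listRegLogPage(page, perPage int) ([]RegLogItem, error) {
	if page < 1 {
		page = 1
	}
	offset := (page - 1) * perPage
	// offset and perPage map straight onto the LIMIT ?,? in the prepared query above.
	return RegLogs.GetOffset(offset, perPage)
}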
type LoginLogItem struct {
	ID      int
	UID     int
	Success bool
	IP      string
	DoneAt  string
}

type LoginLogStmts struct {
	update *sql.Stmt
	create *sql.Stmt
}

var loginLogStmts LoginLogStmts

func init() {
	DbInits.Add(func(acc *qgen.Accumulator) error {
		ll := "login_logs"
		loginLogStmts = LoginLogStmts{
			update: acc.Update(ll).Set("uid=?,success=?,doneAt=?").Where("lid=?").Prepare(),
			create: acc.Insert(ll).Columns("uid,success,ipaddress,doneAt").Fields("?,?,?,UTC_TIMESTAMP()").Prepare(),
		}
		return acc.FirstError()
	})
}

// TODO: Reload this item in the store, probably doesn't matter right now, but it might when we start caching this stuff in memory
// ! Retroactive updates of date are not permitted for integrity reasons
func (l *LoginLogItem) Commit() error {
	_, e := loginLogStmts.update.Exec(l.UID, l.Success, l.DoneAt, l.ID)
	return e
}

func (l *LoginLogItem) Create() (id int, e error) {
	res, e := loginLogStmts.create.Exec(l.UID, l.Success, l.IP)
	if e != nil {
		return 0, e
	}
	id64, e := res.LastInsertId()
	l.ID = int(id64)
	return l.ID, e
}

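As with the registration log, a small sketch of how a login route might record an attempt, assuming it sits in this package; the function and its parameters are illustrative.

// recordLoginAttempt is a hypothetical helper that writes one row to
// login_logs for the given user, marking whether the attempt succeeded.
func recordLoginAttempt(uid int, success bool, ip string) error {
	l := &LoginLogItem{UID: uid, Success: success, IP: ip}
	// doneAt is filled in by UTC_TIMESTAMP() in the prepared insert above.
	_, err := l.Create()
	return err
}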
type LoginLogStore interface {
	Count() (count int)
	CountUser(uid int) (count int)
	GetOffset(uid, offset, perPage int) (logs []LoginLogItem, err error)
	Purge() error

	DeleteOlderThanDays(days int) error
}

type SQLLoginLogStore struct {
	count           *sql.Stmt
	countForUser    *sql.Stmt
	getOffsetByUser *sql.Stmt
	purge           *sql.Stmt

	deleteOlderThanDays *sql.Stmt
}

func NewLoginLogStore(acc *qgen.Accumulator) (*SQLLoginLogStore, error) {
	ll := "login_logs"
	return &SQLLoginLogStore{
		count:           acc.Count(ll).Prepare(),
		countForUser:    acc.Count(ll).Where("uid=?").Prepare(),
		getOffsetByUser: acc.Select(ll).Columns("lid,success,ipaddress,doneAt").Where("uid=?").Orderby("doneAt DESC").Limit("?,?").Prepare(),
		purge:           acc.Purge(ll),

		deleteOlderThanDays: acc.Delete(ll).DateOlderThanQ("doneAt", "day").Prepare(),
	}, acc.FirstError()
}

func (s *SQLLoginLogStore) Count() (count int) {
	return Count(s.count)
}

func (s *SQLLoginLogStore) CountUser(uid int) (count int) {
	return Countf(s.countForUser, uid)
}

func (s *SQLLoginLogStore) GetOffset(uid, offset, perPage int) (logs []LoginLogItem, e error) {
	rows, e := s.getOffsetByUser.Query(uid, offset, perPage)
	if e != nil {
		return logs, e
	}
	defer rows.Close()

	for rows.Next() {
		l := LoginLogItem{UID: uid}
		var doneAt time.Time
		e := rows.Scan(&l.ID, &l.Success, &l.IP, &doneAt)
		if e != nil {
			return logs, e
		}
		l.DoneAt = doneAt.Format("2006-01-02 15:04:05")
		logs = append(logs, l)
	}
	return logs, rows.Err()
}

func (s *SQLLoginLogStore) DeleteOlderThanDays(days int) error {
	_, e := s.deleteOlderThanDays.Exec(days)
	return e
}

// Delete all login logs
func (s *SQLLoginLogStore) Purge() error {
	_, e := s.purge.Exec()
	return e
}

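The DeleteOlderThanDays methods above lend themselves to a scheduled pruning task. A sketch, assuming package-level RegLogs and LoginLogs store instances and a caller that invokes this from whatever scheduler the software uses; the retention period is an arbitrary example, not a project default.

// pruneOldLogs is a hypothetical maintenance task that trims both log tables
// to a fixed retention window.
func pruneOldLogs() error {
	const retentionDays = 90 // example value
	if err := RegLogs.DeleteOlderThanDays(retentionDays); err != nil {
		return err
	}
	return LoginLogs.DeleteOlderThanDays(retentionDays)
}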
@@ -1,99 +1,99 @@
/*
*
* OttoJS Plugin Module
* Copyright Azareal 2016 - 2019
*
*/
package common

import (
	"errors"

	"github.com/robertkrimen/otto"
)

type OttoPluginLang struct {
	vm      *otto.Otto
	plugins map[string]*otto.Script
	vars    map[string]*otto.Object
}

func init() {
	pluginLangs["ottojs"] = &OttoPluginLang{
		plugins: make(map[string]*otto.Script),
		vars:    make(map[string]*otto.Object),
	}
}

func (js *OttoPluginLang) Init() (err error) {
	js.vm = otto.New()
	js.vars["current_page"], err = js.vm.Object(`var current_page = {}`)
	return err
}

func (js *OttoPluginLang) GetName() string {
	return "ottojs"
}

func (js *OttoPluginLang) GetExts() []string {
	return []string{".js"}
}

func (js *OttoPluginLang) AddPlugin(meta PluginMeta) (plugin *Plugin, err error) {
	script, err := js.vm.Compile("./extend/"+meta.UName+"/"+meta.Main, nil)
	if err != nil {
		return nil, err
	}

	var pluginInit = func(plugin *Plugin) error {
		retValue, err := js.vm.Run(script)
		if err != nil {
			return err
		}
		if retValue.IsString() {
			ret, err := retValue.ToString()
			if err != nil {
				return err
			}
			if ret != "" {
				return errors.New(ret)
			}
		}
		return nil
	}

	plugin = new(Plugin)
	plugin.UName = meta.UName
	plugin.Name = meta.Name
	plugin.Author = meta.Author
	plugin.URL = meta.URL
	plugin.Settings = meta.Settings
	plugin.Tag = meta.Tag
	plugin.Type = "ottojs"
	plugin.Init = pluginInit

	// TODO: Implement plugin life cycle events

	buildPlugin(plugin)

	plugin.Data = script
	return plugin, nil
}

/*func (js *OttoPluginLang) addHook(hook string, plugin string) {
	hooks[hook] = func(data interface{}) interface{} {
		switch d := data.(type) {
		case Page:
			currentPage := js.vars["current_page"]
			currentPage.Set("Title", d.Title)
		case TopicPage:

		case ProfilePage:

		case Reply:

		default:
			log.Print("Not a valid JS datatype")
		}
	}
}*/
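A self-contained sketch of the embedding pattern used by AddPlugin above: compile a script once, run it at init time, and treat a non-empty string return value as an error. It uses the public robertkrimen/otto API directly and stands outside the plugin system; the script body is a stand-in.

package main

import (
	"errors"
	"fmt"

	"github.com/robertkrimen/otto"
)

func main() {
	vm := otto.New()
	// A plugin's main file can signal failure by evaluating to a string.
	script, err := vm.Compile("example.js", `"missing configuration"`)
	if err != nil {
		panic(err)
	}
	if err := runPluginScript(vm, script); err != nil {
		fmt.Println("plugin init failed:", err)
	}
}

// runPluginScript mirrors the pluginInit closure above: a string result is
// converted into a Go error, anything else counts as success.
func runPluginScript(vm *otto.Otto, script *otto.Script) error {
	ret, err := vm.Run(script)
	if err != nil {
		return err
	}
	if ret.IsString() {
		s, err := ret.ToString()
		if err != nil {
			return err
		}
		if s != "" {
			return errors.New(s)
		}
	}
	return nil
}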
@@ -20,15 +20,15 @@ func (_ *WSHub) userCount() int { return 0 }
func (hub *WSHub) broadcastMessage(_ string) error { return nil }

func (hub *WSHub) pushMessage(_ int, _ string) error {
	return errWsNouser
}

func (hub *WSHub) pushAlert(_ int, _ int, _ string, _ string, _ int, _ int, _ int) error {
	return errWsNouser
}

func (hub *WSHub) pushAlerts(_ []int, _ int, _ string, _ string, _ int, _ int, _ int) error {
	return errWsNouser
}

func RouteWebsockets(_ http.ResponseWriter, _ *http.Request, _ User) {}

@@ -1,46 +0,0 @@
package common

// NullReplyCache is a reply cache to be used when you don't want a cache and just want queries to passthrough to the database
type NullReplyCache struct {
}

// NewNullReplyCache gives you a new instance of NullReplyCache
func NewNullReplyCache() *NullReplyCache {
	return &NullReplyCache{}
}

// nolint
func (c *NullReplyCache) Get(id int) (*Reply, error) {
	return nil, ErrNoRows
}
func (c *NullReplyCache) GetUnsafe(id int) (*Reply, error) {
	return nil, ErrNoRows
}
func (c *NullReplyCache) BulkGet(ids []int) (list []*Reply) {
	return make([]*Reply, len(ids))
}
func (c *NullReplyCache) Set(_ *Reply) error {
	return nil
}
func (c *NullReplyCache) Add(_ *Reply) error {
	return nil
}
func (c *NullReplyCache) AddUnsafe(_ *Reply) error {
	return nil
}
func (c *NullReplyCache) Remove(id int) error {
	return nil
}
func (c *NullReplyCache) RemoveUnsafe(id int) error {
	return nil
}
func (c *NullReplyCache) Flush() {
}
func (c *NullReplyCache) Length() int {
	return 0
}
func (c *NullReplyCache) SetCapacity(_ int) {
}
func (c *NullReplyCache) GetCapacity() int {
	return 0
}
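The null cache above is the usual null-object pattern: it satisfies the cache interface while making every lookup miss, so stores fall through to the database. A sketch of how a store might be wired either way, assuming a ReplyCache interface and a memory-backed constructor along these lines exist; both of those names are assumptions, not confirmed API.

// newReplyCache is a hypothetical factory that picks a real cache when a
// capacity is configured and the pass-through null cache otherwise.
func newReplyCache(capacity int) ReplyCache {
	if capacity <= 0 {
		// Every Get returns ErrNoRows, so callers always hit the database.
		return NewNullReplyCache()
	}
	return NewMemoryReplyCache(capacity) // assumed constructor for the real cache
}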
@@ -6,44 +6,38 @@ type NullTopicCache struct {

// NewNullTopicCache gives you a new instance of NullTopicCache
func NewNullTopicCache() *NullTopicCache {
	return &NullTopicCache{}
}

// nolint
func (c *NullTopicCache) Get(id int) (*Topic, error) {
	return nil, ErrNoRows
}
func (c *NullTopicCache) GetUnsafe(id int) (*Topic, error) {
	return nil, ErrNoRows
}
func (c *NullTopicCache) BulkGet(ids []int) (list []*Topic) {
	return make([]*Topic, len(ids))
}
func (c *NullTopicCache) Set(_ *Topic) error {
	return nil
}
func (c *NullTopicCache) Add(_ *Topic) error {
	return nil
}
func (c *NullTopicCache) AddUnsafe(_ *Topic) error {
	return nil
}
func (c *NullTopicCache) Remove(id int) error {
	return nil
}
func (c *NullTopicCache) RemoveMany(ids []int) error {
	return nil
}
func (c *NullTopicCache) RemoveUnsafe(id int) error {
	return nil
}
func (c *NullTopicCache) Flush() {
}
func (c *NullTopicCache) Length() int {
	return 0
}
func (c *NullTopicCache) SetCapacity(_ int) {
}
func (c *NullTopicCache) GetCapacity() int {
	return 0
}

@@ -6,49 +6,45 @@ type NullUserCache struct {

// NewNullUserCache gives you a new instance of NullUserCache
func NewNullUserCache() *NullUserCache {
	return &NullUserCache{}
}

// nolint
func (c *NullUserCache) DeallocOverflow(evictPriority bool) (evicted int) {
	return 0
}
func (c *NullUserCache) Get(id int) (*User, error) {
	return nil, ErrNoRows
}

func (c *NullUserCache) Getn(id int) *User {
	return nil
}
func (c *NullUserCache) BulkGet(ids []int) (list []*User) {
	return make([]*User, len(ids))
}
func (c *NullUserCache) GetUnsafe(id int) (*User, error) {
	return nil, ErrNoRows
}
func (c *NullUserCache) Set(_ *User) error {
	return nil
}
func (c *NullUserCache) Add(_ *User) error {
	return nil
}
func (c *NullUserCache) AddUnsafe(_ *User) error {
	return nil
}
func (c *NullUserCache) Remove(id int) error {
	return nil
}
func (c *NullUserCache) RemoveUnsafe(id int) error {
	return nil
}
func (c *NullUserCache) BulkRemove(ids []int) {}
func (c *NullUserCache) Flush() {
}
func (c *NullUserCache) Length() int {
	return 0
}
func (c *NullUserCache) SetCapacity(_ int) {
}
func (c *NullUserCache) GetCapacity() int {
	return 0
}

@@ -5,172 +5,171 @@ import (
	"strconv"
	"strings"

	qgen "git.tuxpa.in/a/gosora/query_gen"
)

type CustomPageStmts struct {
	update *sql.Stmt
	create *sql.Stmt
}

var customPageStmts CustomPageStmts

func init() {
	DbInits.Add(func(acc *qgen.Accumulator) error {
		customPageStmts = CustomPageStmts{
			update: acc.Update("pages").Set("name=?,title=?,body=?,allowedGroups=?,menuID=?").Where("pid=?").Prepare(),
			create: acc.Insert("pages").Columns("name,title,body,allowedGroups,menuID").Fields("?,?,?,?,?").Prepare(),
		}
		return acc.FirstError()
	})
}

type CustomPage struct {
	ID            int
	Name          string // TODO: Let admins put pages in "virtual subdirectories"
	Title         string
	Body          string
	AllowedGroups []int
	MenuID        int
}

func BlankCustomPage() *CustomPage {
	return new(CustomPage)
}

func (p *CustomPage) AddAllowedGroup(gid int) {
	p.AllowedGroups = append(p.AllowedGroups, gid)
}

func (p *CustomPage) getRawAllowedGroups() (rawAllowedGroups string) {
	for _, group := range p.AllowedGroups {
		rawAllowedGroups += strconv.Itoa(group) + ","
	}
	if len(rawAllowedGroups) > 0 {
		rawAllowedGroups = rawAllowedGroups[:len(rawAllowedGroups)-1]
	}
	return rawAllowedGroups
}

func (p *CustomPage) Commit() error {
	_, err := customPageStmts.update.Exec(p.Name, p.Title, p.Body, p.getRawAllowedGroups(), p.MenuID, p.ID)
	Pages.Reload(p.ID)
	return err
}

func (p *CustomPage) Create() (int, error) {
	res, err := customPageStmts.create.Exec(p.Name, p.Title, p.Body, p.getRawAllowedGroups(), p.MenuID)
	if err != nil {
		return 0, err
	}
	pid64, err := res.LastInsertId()
	return int(pid64), err
}

var Pages PageStore

// Holds the custom pages, but doesn't include the template pages in /pages/ which are a lot more flexible yet harder to use and which are too risky security-wise to make editable in the Control Panel
type PageStore interface {
	Count() (count int)
	Get(id int) (*CustomPage, error)
	GetByName(name string) (*CustomPage, error)
	GetOffset(offset, perPage int) (pages []*CustomPage, err error)
	Reload(id int) error
	Delete(id int) error
}

// TODO: Add a cache to this to save on the queries
type DefaultPageStore struct {
	get       *sql.Stmt
	getByName *sql.Stmt
	getOffset *sql.Stmt
	count     *sql.Stmt
	delete    *sql.Stmt
}

func NewDefaultPageStore(acc *qgen.Accumulator) (*DefaultPageStore, error) {
	pa := "pages"
	allCols := "pid, name, title, body, allowedGroups, menuID"
	return &DefaultPageStore{
		get:       acc.Select(pa).Columns("name, title, body, allowedGroups, menuID").Where("pid=?").Prepare(),
		getByName: acc.Select(pa).Columns(allCols).Where("name=?").Prepare(),
		getOffset: acc.Select(pa).Columns(allCols).Orderby("pid DESC").Limit("?,?").Prepare(),
		count:     acc.Count(pa).Prepare(),
		delete:    acc.Delete(pa).Where("pid=?").Prepare(),
	}, acc.FirstError()
}

func (s *DefaultPageStore) Count() (count int) {
	err := s.count.QueryRow().Scan(&count)
	if err != nil {
		LogError(err)
	}
	return count
}

func (s *DefaultPageStore) parseAllowedGroups(raw string, page *CustomPage) error {
	if raw == "" {
		return nil
	}
	for _, sgroup := range strings.Split(raw, ",") {
		group, err := strconv.Atoi(sgroup)
		if err != nil {
			return err
		}
		page.AddAllowedGroup(group)
	}
	return nil
}

func (s *DefaultPageStore) Get(id int) (*CustomPage, error) {
	p := &CustomPage{ID: id}
	rawAllowedGroups := ""
	err := s.get.QueryRow(id).Scan(&p.Name, &p.Title, &p.Body, &rawAllowedGroups, &p.MenuID)
	if err != nil {
		return nil, err
	}
	return p, s.parseAllowedGroups(rawAllowedGroups, p)
}

func (s *DefaultPageStore) GetByName(name string) (*CustomPage, error) {
	p := BlankCustomPage()
	rawAllowedGroups := ""
	err := s.getByName.QueryRow(name).Scan(&p.ID, &p.Name, &p.Title, &p.Body, &rawAllowedGroups, &p.MenuID)
	if err != nil {
		return nil, err
	}
	return p, s.parseAllowedGroups(rawAllowedGroups, p)
}

func (s *DefaultPageStore) GetOffset(offset, perPage int) (pages []*CustomPage, err error) {
	rows, err := s.getOffset.Query(offset, perPage)
	if err != nil {
		return pages, err
	}
	defer rows.Close()

	for rows.Next() {
		p := &CustomPage{ID: 0}
		rawAllowedGroups := ""
		err := rows.Scan(&p.ID, &p.Name, &p.Title, &p.Body, &rawAllowedGroups, &p.MenuID)
		if err != nil {
			return pages, err
		}
		err = s.parseAllowedGroups(rawAllowedGroups, p)
		if err != nil {
			return pages, err
		}
		pages = append(pages, p)
	}
	return pages, rows.Err()
}

// Always returns nil as there's currently no cache
func (s *DefaultPageStore) Reload(id int) error {
	return nil
}

func (s *DefaultPageStore) Delete(id int) error {
	_, err := s.delete.Exec(id)
	return err
}

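A short sketch of driving the page store above from admin-side code, assuming the package-level Pages store has been initialised with NewDefaultPageStore; the page contents and group IDs are placeholders.

// createAboutPage is a hypothetical helper that creates a custom page
// restricted to two groups and then reads it back by name.
func createAboutPage() (*CustomPage, error) {
	p := BlankCustomPage()
	p.Name = "about"
	p.Title = "About this site"
	p.Body = "<p>Example body</p>"
	p.AddAllowedGroup(1) // example group IDs
	p.AddAllowedGroup(2)
	if _, err := p.Create(); err != nil {
		return nil, err
	}
	// GetByName re-parses allowedGroups from its comma-separated stored form.
	return Pages.GetByName("about")
}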
common/pages.go (879 lines changed): file diff suppressed because it is too large.
common/parser.go (1997 lines changed): file diff suppressed because it is too large.
@@ -1,76 +0,0 @@
package common

import (
	"crypto/subtle"
	"database/sql"
	"errors"

	qgen "git.tuxpa.in/a/gosora/query_gen"
)

var PasswordResetter *DefaultPasswordResetter
var ErrBadResetToken = errors.New("This reset token has expired.")

type DefaultPasswordResetter struct {
	getTokens *sql.Stmt
	create    *sql.Stmt
	delete    *sql.Stmt
}

/*
type PasswordReset struct {
	Email     string    `q:"email"`
	Uid       int       `q:"uid"`
	Validated bool      `q:"validated"`
	Token     string    `q:"token"`
	CreatedAt time.Time `q:"createdAt"`
}
*/

func NewDefaultPasswordResetter(acc *qgen.Accumulator) (*DefaultPasswordResetter, error) {
	pr := "password_resets"
	return &DefaultPasswordResetter{
		getTokens: acc.Select(pr).Columns("token").Where("uid=?").Prepare(),
		create:    acc.Insert(pr).Columns("email,uid,validated,token,createdAt").Fields("?,?,0,?,UTC_TIMESTAMP()").Prepare(),
		//create: acc.Insert(pr).Cols("email,uid,validated=0,token,createdAt=UTC_TIMESTAMP()").Prep(),
		delete: acc.Delete(pr).Where("uid=?").Prepare(),
		//model: acc.Model(w).Cols("email,uid,validated=0,token").Key("uid").CreatedAt("createdAt").Prep(),
	}, acc.FirstError()
}

func (r *DefaultPasswordResetter) Create(email string, uid int, token string) error {
	_, err := r.create.Exec(email, uid, token)
	return err
}

func (r *DefaultPasswordResetter) FlushTokens(uid int) error {
	_, err := r.delete.Exec(uid)
	return err
}

func (r *DefaultPasswordResetter) ValidateToken(uid int, token string) error {
	rows, err := r.getTokens.Query(uid)
	if err != nil {
		return err
	}
	defer rows.Close()

	success := false
	for rows.Next() {
		var rtoken string
		if err := rows.Scan(&rtoken); err != nil {
			return err
		}
		if subtle.ConstantTimeCompare([]byte(token), []byte(rtoken)) == 1 {
			success = true
		}
	}
	if err = rows.Err(); err != nil {
		return err
	}

	if !success {
		return ErrBadResetToken
	}
	return nil
}
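A sketch of the token lifecycle implied above: mint a random token, store it against the user, later validate it in constant time and then invalidate every outstanding token for that account. It assumes the package-level PasswordResetter has been constructed, and it needs crypto/rand and encoding/hex; the token format here is an example and may differ from what the rest of the codebase does.

// issueResetToken is a hypothetical helper that mints and stores a reset
// token for a user, returning the token so it can be emailed out.
func issueResetToken(email string, uid int) (string, error) {
	raw := make([]byte, 32)
	if _, err := rand.Read(raw); err != nil { // crypto/rand
		return "", err
	}
	token := hex.EncodeToString(raw)
	return token, PasswordResetter.Create(email, uid, token)
}

// redeemResetToken is a hypothetical helper for the other end of the flow.
func redeemResetToken(uid int, token string) error {
	if err := PasswordResetter.ValidateToken(uid, token); err != nil {
		return err // ErrBadResetToken for unknown or expired tokens
	}
	// One successful use invalidates all outstanding tokens for the user.
	return PasswordResetter.FlushTokens(uid)
}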
@@ -4,8 +4,7 @@ import (
	"encoding/json"
	"log"

	"git.tuxpa.in/a/gosora/common/phrases"
	qgen "git.tuxpa.in/a/gosora/query_gen"
)

// TODO: Refactor the perms system
@@ -18,228 +17,216 @@ var AllPluginPerms = make(map[string]bool)

// ? - Can we avoid duplicating the items in this list in a bunch of places?
var GlobalPermList = []string{
	"BanUsers",
	"ActivateUsers",
	"EditUser",
	"EditUserEmail",
	"EditUserPassword",
	"EditUserGroup",
	"EditUserGroupSuperMod",
	"EditUserGroupAdmin",
	"EditGroup",
	"EditGroupLocalPerms",
	"EditGroupGlobalPerms",
	"EditGroupSuperMod",
	"EditGroupAdmin",
	"ManageForums",
	"EditSettings",
	"ManageThemes",
	"ManagePlugins",
	"ViewAdminLogs",
	"ViewIPs",
	"UploadFiles",
	"UploadAvatars",
	"UseConvos",
	"UseConvosOnlyWithMod",
	"CreateProfileReply",
	"AutoEmbed",
	"AutoLink",
}

// Permission Structure: ActionComponent[Subcomponent]Flag
type Perms struct {
	// Global Permissions
	BanUsers              bool `json:",omitempty"`
	ActivateUsers         bool `json:",omitempty"`
	EditUser              bool `json:",omitempty"`
	EditUserEmail         bool `json:",omitempty"`
	EditUserPassword      bool `json:",omitempty"`
	EditUserGroup         bool `json:",omitempty"`
	EditUserGroupSuperMod bool `json:",omitempty"`
	EditUserGroupAdmin    bool `json:",omitempty"`
	EditGroup             bool `json:",omitempty"`
	EditGroupLocalPerms   bool `json:",omitempty"`
	EditGroupGlobalPerms  bool `json:",omitempty"`
	EditGroupSuperMod     bool `json:",omitempty"`
	EditGroupAdmin        bool `json:",omitempty"`
	ManageForums          bool `json:",omitempty"` // This could be local, albeit limited for per-forum managers?
	EditSettings          bool `json:",omitempty"`
	ManageThemes          bool `json:",omitempty"`
	ManagePlugins         bool `json:",omitempty"`
	ViewAdminLogs         bool `json:",omitempty"`
	ViewIPs               bool `json:",omitempty"`

	// Global non-staff permissions
	UploadFiles          bool `json:",omitempty"`
	UploadAvatars        bool `json:",omitempty"`
	UseConvos            bool `json:",omitempty"`
	UseConvosOnlyWithMod bool `json:",omitempty"`
	CreateProfileReply   bool `json:",omitempty"`
	AutoEmbed            bool `json:",omitempty"`
	AutoLink             bool `json:",omitempty"`

	// Forum permissions
	ViewTopic bool `json:",omitempty"`
	//ViewOwnTopic bool `json:",omitempty"`
	LikeItem    bool `json:",omitempty"`
	CreateTopic bool `json:",omitempty"`
	EditTopic   bool `json:",omitempty"`
	DeleteTopic bool `json:",omitempty"`
	CreateReply bool `json:",omitempty"`
	//CreateReplyToOwn bool `json:",omitempty"`
	EditReply bool `json:",omitempty"`
	//EditOwnReply bool `json:",omitempty"`
	DeleteReply bool `json:",omitempty"`
	//DeleteOwnReply bool `json:",omitempty"`
	PinTopic   bool `json:",omitempty"`
	CloseTopic bool `json:",omitempty"`
	//CloseOwnTopic bool `json:",omitempty"`
	MoveTopic bool `json:",omitempty"`

	//ExtData map[string]bool `json:",omitempty"`
}

func init() {
	BlankPerms = Perms{
		//ExtData: make(map[string]bool),
	}

	GuestPerms = Perms{
		ViewTopic: true,
		//ExtData: make(map[string]bool),
	}

	AllPerms = Perms{
		BanUsers:              true,
		ActivateUsers:         true,
		EditUser:              true,
		EditUserEmail:         true,
		EditUserPassword:      true,
		EditUserGroup:         true,
		EditUserGroupSuperMod: true,
		EditUserGroupAdmin:    true,
		EditGroup:             true,
		EditGroupLocalPerms:   true,
		EditGroupGlobalPerms:  true,
		EditGroupSuperMod:     true,
		EditGroupAdmin:        true,
		ManageForums:          true,
		EditSettings:          true,
		ManageThemes:          true,
		ManagePlugins:         true,
		ViewAdminLogs:         true,
		ViewIPs:               true,

		UploadFiles:          true,
		UploadAvatars:        true,
		UseConvos:            true,
		UseConvosOnlyWithMod: true,
		CreateProfileReply:   true,
		AutoEmbed:            true,
		AutoLink:             true,

		ViewTopic:   true,
		LikeItem:    true,
		CreateTopic: true,
		EditTopic:   true,
		DeleteTopic: true,
		CreateReply: true,
		EditReply:   true,
		DeleteReply: true,
		PinTopic:    true,
		CloseTopic:  true,
		MoveTopic:   true,

		//ExtData: make(map[string]bool),
	}

	GuestUser.Perms = GuestPerms
	DebugLogf("Guest Perms: %+v\n", GuestPerms)
	DebugLogf("All Perms: %+v\n", AllPerms)
}

func StripInvalidGroupForumPreset(preset string) string {
	switch preset {
	case "read_only", "can_post", "can_moderate", "no_access", "default", "custom":
		return preset
	}
	return ""
}

func StripInvalidPreset(preset string) string {
	switch preset {
	case "all", "announce", "members", "staff", "admins", "archive", "custom":
		return preset
	}
	return ""
}

// TODO: Move this into the phrase system?
func PresetToLang(preset string) string {
	phrases := phrases.GetAllPermPresets()
	phrase, ok := phrases[preset]
	if !ok {
		phrase = phrases["unknown"]
	}
	return phrase
}

// TODO: Is this racey?
// TODO: Test this along with the rest of the perms system
func RebuildGroupPermissions(g *Group) error {
	var permstr []byte
	log.Print("Reloading a group")

	// TODO: Avoid re-initting this all the time
	getGroupPerms, e := qgen.Builder.SimpleSelect("users_groups", "permissions", "gid=?", "", "")
	if e != nil {
		return e
	}
	defer getGroupPerms.Close()

	e = getGroupPerms.QueryRow(g.ID).Scan(&permstr)
	if e != nil {
		return e
	}

	tmpPerms := Perms{
		//ExtData: make(map[string]bool),
	}
	e = json.Unmarshal(permstr, &tmpPerms)
	if e != nil {
		return e
	}
	g.Perms = tmpPerms
	return nil
}

func OverridePerms(p *Perms, status bool) {
	if status {
		*p = AllPerms
	} else {
		*p = BlankPerms
	}
}

// TODO: We need a better way of overriding forum perms rather than setting them one by one
func OverrideForumPerms(p *Perms, status bool) {
	p.ViewTopic = status
	p.LikeItem = status
	p.CreateTopic = status
	p.EditTopic = status
	p.DeleteTopic = status
	p.CreateReply = status
	p.EditReply = status
	p.DeleteReply = status
	p.PinTopic = status
	p.CloseTopic = status
	p.MoveTopic = status
}

func RegisterPluginPerm(name string) {
	AllPluginPerms[name] = true
}

func DeregisterPluginPerm(name string) {
	delete(AllPluginPerms, name)
}

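One consequence of the `json:",omitempty"` tags on Perms above is that serialised permission sets only contain the flags that are actually true, which keeps the stored permission blobs small. A self-contained illustration with a stand-in struct (not the real Perms type):

package main

import (
	"encoding/json"
	"fmt"
)

// miniPerms is a stand-in for the much larger Perms struct above.
type miniPerms struct {
	BanUsers  bool `json:",omitempty"`
	ViewTopic bool `json:",omitempty"`
	LikeItem  bool `json:",omitempty"`
}

func main() {
	guest := miniPerms{ViewTopic: true}
	out, _ := json.Marshal(guest)
	// Only the true flag is emitted: {"ViewTopic":true}
	fmt.Println(string(out))

	// Unmarshalling treats missing keys as false, so round-tripping works.
	var back miniPerms
	_ = json.Unmarshal(out, &back)
	fmt.Printf("%+v\n", back)
}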
@ -1,24 +1,21 @@
|
||||
/*
|
||||
*
|
||||
* Gosora Phrase System
|
||||
* Copyright Azareal 2017 - 2020
|
||||
* Copyright Azareal 2017 - 2019
|
||||
*
|
||||
*/
|
||||
package phrases
|
||||
package common
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
)
|
||||
|
||||
// TODO: Add a phrase store?
|
||||
@ -38,22 +35,18 @@ type LevelPhrases struct {
|
||||
|
||||
// ! For the sake of thread safety, you must never modify a *LanguagePack directly, but to create a copy of it and overwrite the entry in the sync.Map
|
||||
type LanguagePack struct {
|
||||
Name string
|
||||
IsoCode string
|
||||
ModTime time.Time
|
||||
//LastUpdated string
|
||||
|
||||
Name string
|
||||
// Should we use a sync map or a struct for these? It would be nice, if we could keep all the phrases consistent.
|
||||
Levels LevelPhrases
|
||||
Perms map[string]string
|
||||
GlobalPerms map[string]string
|
||||
LocalPerms map[string]string
|
||||
SettingPhrases map[string]string
|
||||
PermPresets map[string]string
|
||||
Accounts map[string]string // TODO: Apply these phrases in the software proper
|
||||
UserAgents map[string]string
|
||||
OperatingSystems map[string]string
|
||||
HumanLanguages map[string]string
|
||||
Errors map[string]string // Temp stand-in
|
||||
ErrorsBytes map[string][]byte
|
||||
Errors map[string]map[string]string // map[category]map[name]value
|
||||
NoticePhrases map[string]string
|
||||
PageTitles map[string]string
|
||||
TmplPhrases map[string]string
|
||||
@ -66,58 +59,32 @@ type LanguagePack struct {
|
||||
var langPacks sync.Map // nolint it is used
|
||||
var langTmplIndicesToNames [][]string // [tmplID][index]phraseName
|
||||
|
||||
func InitPhrases(lang string) error {
|
||||
func InitPhrases() error {
|
||||
log.Print("Loading the language packs")
|
||||
err := filepath.Walk("./langs", func(path string, f os.FileInfo, err error) error {
|
||||
if f.IsDir() {
|
||||
return nil
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ext := filepath.Ext("/langs/" + path)
|
||||
if ext != ".json" {
|
||||
log.Printf("Found a '%s' in /langs/", ext)
|
||||
return nil
|
||||
}
|
||||
|
||||
data, err := ioutil.ReadFile(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var ext = filepath.Ext("/langs/" + path)
|
||||
if ext != ".json" {
|
||||
log.Printf("Found a '%s' in /langs/", ext)
|
||||
return nil
|
||||
}
|
||||
|
||||
var langPack LanguagePack
|
||||
err = json.Unmarshal(data, &langPack)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
langPack.ModTime = f.ModTime()
|
||||
|
||||
langPack.ErrorsBytes = make(map[string][]byte)
|
||||
for name, phrase := range langPack.Errors {
|
||||
langPack.ErrorsBytes[name] = []byte(phrase)
|
||||
}
|
||||
|
||||
// [prefix][name]phrase
|
||||
langPack.TmplPhrasesPrefixes = make(map[string]map[string]string)
|
||||
conMap := make(map[string]string) // Cache phrase strings so we can de-dupe items to reduce memory use. There appear to be some minor improvements with this, although we would need a more thorough check to be sure.
|
||||
for name, phrase := range langPack.TmplPhrases {
|
||||
_, ok := conMap[phrase]
|
||||
if !ok {
|
||||
conMap[phrase] = phrase
|
||||
}
|
||||
cItem := conMap[phrase]
|
||||
prefix := strings.Split(name, ".")[0]
|
||||
_, ok = langPack.TmplPhrasesPrefixes[prefix]
|
||||
if !ok {
|
||||
langPack.TmplPhrasesPrefixes[prefix] = make(map[string]string)
|
||||
}
|
||||
langPack.TmplPhrasesPrefixes[prefix][name] = cItem
|
||||
}
|
||||
|
||||
// [prefix][name]phrase
|
||||
/*langPack.TmplPhrasesPrefixes = make(map[string]map[string]string)
|
||||
for name, phrase := range langPack.TmplPhrases {
|
||||
prefix := strings.Split(name, ".")[0]
|
||||
_, ok := langPack.TmplPhrasesPrefixes[prefix]
|
||||
@ -125,21 +92,19 @@ func InitPhrases(lang string) error {
|
||||
langPack.TmplPhrasesPrefixes[prefix] = make(map[string]string)
|
||||
}
|
||||
langPack.TmplPhrasesPrefixes[prefix][name] = phrase
|
||||
}*/
|
||||
}
|
||||
|
||||
langPack.TmplIndicesToPhrases = make([][][]byte, len(langTmplIndicesToNames))
|
||||
for tmplID, phraseNames := range langTmplIndicesToNames {
|
||||
phraseSet := make([][]byte, len(phraseNames))
|
||||
var phraseSet = make([][]byte, len(phraseNames))
|
||||
for index, phraseName := range phraseNames {
|
||||
phrase, ok := langPack.TmplPhrases[phraseName]
|
||||
if !ok {
|
||||
log.Printf("langPack.TmplPhrases: %+v\n", langPack.TmplPhrases)
|
||||
panic("Couldn't find template phrase '" + phraseName + "'")
|
||||
log.Print("Couldn't find template phrase '" + phraseName + "'")
|
||||
}
|
||||
phraseSet[index] = []byte(phrase)
|
||||
}
|
||||
langPack.TmplIndicesToPhrases[tmplID] = phraseSet
|
||||
TmplIndexCallback(tmplID, phraseSet)
|
||||
}
|
||||
|
||||
log.Print("Adding the '" + langPack.Name + "' language pack")
|
||||
@ -155,9 +120,9 @@ func InitPhrases(lang string) error {
|
||||
return errors.New("You don't have any language packs")
|
||||
}
|
||||
|
||||
langPack, ok := langPacks.Load(lang)
|
||||
langPack, ok := langPacks.Load(Site.Language)
|
||||
if !ok {
|
||||
return errors.New("Couldn't find the " + lang + " language pack")
|
||||
return errors.New("Couldn't find the " + Site.Language + " language pack")
|
||||
}
|
||||
currentLangPack.Store(langPack)
|
||||
return nil
|
||||
@ -175,22 +140,18 @@ func SaveLangPack(langPack *LanguagePack) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func GetLangPack() *LanguagePack {
|
||||
return currentLangPack.Load().(*LanguagePack)
|
||||
}
|
||||
|
||||
func GetLevelPhrase(level int) string {
|
||||
levelPhrases := currentLangPack.Load().(*LanguagePack).Levels
|
||||
if len(levelPhrases.Levels) > 0 && level < len(levelPhrases.Levels) {
|
||||
return strings.Replace(levelPhrases.Levels[level], "{0}", strconv.Itoa(level), -1)
|
||||
}
|
||||
return strings.Replace(levelPhrases.Level, "{0}", strconv.Itoa(level), -1)
|
||||
}
|
||||
|
||||
func GetPermPhrase(name string) string {
|
||||
res, ok := currentLangPack.Load().(*LanguagePack).Perms[name]
|
||||
// TODO: Merge these two maps?
|
||||
func GetGlobalPermPhrase(name string) string {
|
||||
res, ok := currentLangPack.Load().(*LanguagePack).GlobalPerms[name]
|
||||
if !ok {
|
||||
return getPlaceholder("perms", name)
|
||||
return getPhrasePlaceholder("perms", name)
|
||||
}
|
||||
return res
|
||||
}
|
||||
func GetLocalPermPhrase(name string) string {
|
||||
res, ok := currentLangPack.Load().(*LanguagePack).LocalPerms[name]
|
||||
if !ok {
|
||||
return getPhrasePlaceholder("perms", name)
|
||||
}
|
||||
return res
|
||||
}
|
||||
@ -198,7 +159,7 @@ func GetPermPhrase(name string) string {
|
||||
func GetSettingPhrase(name string) string {
|
||||
res, ok := currentLangPack.Load().(*LanguagePack).SettingPhrases[name]
|
||||
if !ok {
|
||||
return getPlaceholder("settings", name)
|
||||
return getPhrasePlaceholder("settings", name)
|
||||
}
|
||||
return res
|
||||
}
|
||||
@ -214,7 +175,7 @@ func GetAllPermPresets() map[string]string {
|
||||
func GetAccountPhrase(name string) string {
|
||||
res, ok := currentLangPack.Load().(*LanguagePack).Accounts[name]
|
||||
if !ok {
|
||||
return getPlaceholder("account", name)
|
||||
return getPhrasePlaceholder("account", name)
|
||||
}
|
||||
return res
|
||||
}
|
||||
@ -238,23 +199,16 @@ func GetOSPhrase(name string) (string, bool) {
|
||||
func GetHumanLangPhrase(name string) (string, bool) {
|
||||
res, ok := currentLangPack.Load().(*LanguagePack).HumanLanguages[name]
|
||||
if !ok {
|
||||
return getPlaceholder("humanlang", name), false
|
||||
return getPhrasePlaceholder("humanlang", name), false
|
||||
}
|
||||
return res, true
|
||||
}
|
||||
|
||||
// TODO: Does comma ok work with multi-dimensional maps?
|
||||
func GetErrorPhrase(name string) string {
|
||||
res, ok := currentLangPack.Load().(*LanguagePack).Errors[name]
|
||||
func GetErrorPhrase(category string, name string) string {
|
||||
res, ok := currentLangPack.Load().(*LanguagePack).Errors[category][name]
|
||||
if !ok {
|
||||
return getPlaceholder("error", name)
|
||||
}
|
||||
return res
|
||||
}
|
||||
func GetErrorPhraseBytes(name string) []byte {
|
||||
res, ok := currentLangPack.Load().(*LanguagePack).ErrorsBytes[name]
|
||||
if !ok {
|
||||
return getPlaceholderBytes("error", name)
|
||||
return getPhrasePlaceholder("error", name)
|
||||
}
|
||||
return res
|
||||
}
|
||||
@ -262,7 +216,7 @@ func GetErrorPhraseBytes(name string) []byte {
|
||||
func GetNoticePhrase(name string) string {
|
||||
res, ok := currentLangPack.Load().(*LanguagePack).NoticePhrases[name]
|
||||
if !ok {
|
||||
return getPlaceholder("notices", name)
|
||||
return getPhrasePlaceholder("notices", name)
|
||||
}
|
||||
return res
|
||||
}
|
||||
@ -270,35 +224,19 @@ func GetNoticePhrase(name string) string {
|
||||
func GetTitlePhrase(name string) string {
|
||||
res, ok := currentLangPack.Load().(*LanguagePack).PageTitles[name]
|
||||
if !ok {
|
||||
return getPlaceholder("title", name)
|
||||
return getPhrasePlaceholder("title", name)
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
func GetTitlePhrasef(name string, params ...interface{}) string {
|
||||
res, ok := currentLangPack.Load().(*LanguagePack).PageTitles[name]
|
||||
if !ok {
|
||||
return getPlaceholder("title", name)
|
||||
}
|
||||
return fmt.Sprintf(res, params...)
|
||||
}
|
||||
|
||||
func GetTmplPhrase(name string) string {
|
||||
res, ok := currentLangPack.Load().(*LanguagePack).TmplPhrases[name]
|
||||
if !ok {
|
||||
return getPlaceholder("tmpl", name)
|
||||
return getPhrasePlaceholder("tmpl", name)
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
func GetTmplPhrasef(name string, params ...interface{}) string {
|
||||
res, ok := currentLangPack.Load().(*LanguagePack).TmplPhrases[name]
|
||||
if !ok {
|
||||
return getPlaceholder("tmpl", name)
|
||||
}
|
||||
return fmt.Sprintf(res, params...)
|
||||
}
|
||||
|
||||
func GetTmplPhrases() map[string]string {
|
||||
return currentLangPack.Load().(*LanguagePack).TmplPhrases
|
||||
}
|
||||
@ -308,17 +246,9 @@ func GetTmplPhrasesByPrefix(prefix string) (phrases map[string]string, ok bool)
|
||||
return res, ok
|
||||
}
|
||||
|
||||
func getPlaceholder(prefix, suffix string) string {
|
||||
func getPhrasePlaceholder(prefix string, suffix string) string {
|
||||
return "{lang." + prefix + "[" + suffix + "]}"
|
||||
}
|
||||
func getPlaceholderBytes(prefix, suffix string) []byte {
|
||||
return []byte("{lang." + prefix + "[" + suffix + "]}")
|
||||
}
|
||||
|
||||
// ! Please don't mutate *LanguagePack
|
||||
func GetCurrentLangPack() *LanguagePack {
|
||||
return currentLangPack.Load().(*LanguagePack)
|
||||
}
|
||||
|
||||
// ? - Use runtime reflection for updating phrases?
|
||||
// TODO: Implement these
|
||||
@ -365,15 +295,3 @@ func RegisterTmplPhraseNames(phraseNames []string) (tmplID int) {
|
||||
func GetTmplPhrasesBytes(tmplID int) [][]byte {
|
||||
return currentLangPack.Load().(*LanguagePack).TmplIndicesToPhrases[tmplID]
|
||||
}
|
||||
|
||||
// New
|
||||
|
||||
var indexCallbacks []func([][]byte)
|
||||
|
||||
func TmplIndexCallback(tmplID int, phraseSet [][]byte) {
|
||||
indexCallbacks[tmplID](phraseSet)
|
||||
}
|
||||
|
||||
func AddTmplIndexCallback(h func([][]byte)) {
|
||||
indexCallbacks = append(indexCallbacks, h)
|
||||
}
|
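The phrase getters above all share the same fallback convention: when a key is missing they return a "{lang.prefix[name]}" placeholder instead of failing, so a missing translation shows up visibly in the UI rather than as an empty string. A self-contained sketch of that pattern, independent of the LanguagePack machinery:

package main

import "fmt"

// lookupPhrase mimics the placeholder fallback used by the phrase getters.
func lookupPhrase(phrases map[string]string, prefix, name string) string {
	if res, ok := phrases[name]; ok {
		return res
	}
	return "{lang." + prefix + "[" + name + "]}"
}

func main() {
	titles := map[string]string{"forums": "Forum List"}
	fmt.Println(lookupPhrase(titles, "title", "forums"))  // Forum List
	fmt.Println(lookupPhrase(titles, "title", "missing")) // {lang.title[missing]}
}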
@ -1,122 +1,120 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"io/ioutil"
|
||||
"path/filepath"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"io/ioutil"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
var pluginLangs = make(map[string]PluginLang)
|
||||
|
||||
// For non-native plugins to bind JSON files to. E.g. JS and Lua
|
||||
type PluginMeta struct {
|
||||
UName string
|
||||
Name string
|
||||
Author string
|
||||
URL string
|
||||
Settings string
|
||||
Tag string
|
||||
UName string
|
||||
Name string
|
||||
Author string
|
||||
URL string
|
||||
Settings string
|
||||
Tag string
|
||||
|
||||
Skip bool // Skip this folder?
|
||||
Main string // The main file
|
||||
Hooks map[string]string // Hooks mapped to functions
|
||||
Skip bool // Skip this folder?
|
||||
Main string // The main file
|
||||
Hooks map[string]string // Hooks mapped to functions
|
||||
}
|
||||
|
||||
type PluginLang interface {
|
GetName() string
GetExts() []string

Init() error
AddPlugin(meta PluginMeta) (*Plugin, error)
//AddHook(name string, handler interface{}) error
//RemoveHook(name string, handler interface{})
//RunHook(name string, data interface{}) interface{}
//RunVHook(name string data ...interface{}) interface{}
}

/*
var ext = filepath.Ext(pluginFile.Name())
if ext == ".txt" || ext == ".go" {
continue
}
*/

func InitPluginLangs() error {
for _, pluginLang := range pluginLangs {
pluginLang.Init()
}
pluginList, err := GetPluginFiles()
if err != nil {
return err
}
for _, pluginLang := range pluginLangs {
pluginLang.Init()
}

for _, pluginItem := range pluginList {
pluginFile, err := ioutil.ReadFile("./extend/" + pluginItem + "/plugin.json")
if err != nil {
return err
}
pluginList, err := GetPluginFiles()
if err != nil {
return err
}

var plugin PluginMeta
err = json.Unmarshal(pluginFile, &plugin)
if err != nil {
return err
}
if plugin.Skip {
continue
}
for _, pluginItem := range pluginList {
pluginFile, err := ioutil.ReadFile("./extend/" + pluginItem + "/plugin.json")
if err != nil {
return err
}

e := func(field, name string) error {
return errors.New("The " + field + " field must not be blank on plugin '" + name + "'")
}
if plugin.UName == "" {
return e("UName", pluginItem)
}
if plugin.Name == "" {
return e("Name", pluginItem)
}
if plugin.Author == "" {
return e("Author", pluginItem)
}
if plugin.Main == "" {
return errors.New("Couldn't find a main file for plugin '" + pluginItem + "'")
}
var plugin PluginMeta
err = json.Unmarshal(pluginFile, &plugin)
if err != nil {
return err
}
if plugin.Skip {
continue
}

ext := filepath.Ext(plugin.Main)
pluginLang, err := ExtToPluginLang(ext)
if err != nil {
return err
}
pplugin, err := pluginLang.AddPlugin(plugin)
if err != nil {
return err
}
Plugins[plugin.UName] = pplugin
}
return nil
if plugin.UName == "" {
return errors.New("The UName field must not be blank on plugin '" + pluginItem + "'")
}
if plugin.Name == "" {
return errors.New("The Name field must not be blank on plugin '" + pluginItem + "'")
}
if plugin.Author == "" {
return errors.New("The Author field must not be blank on plugin '" + pluginItem + "'")
}
if plugin.Main == "" {
return errors.New("Couldn't find a main file for plugin '" + pluginItem + "'")
}

var ext = filepath.Ext(plugin.Main)
pluginLang, err := ExtToPluginLang(ext)
if err != nil {
return err
}
pplugin, err := pluginLang.AddPlugin(plugin)
if err != nil {
return err
}
Plugins[plugin.UName] = pplugin
}
return nil
}

func GetPluginFiles() (pluginList []string, err error) {
pluginFiles, err := ioutil.ReadDir("./extend")
if err != nil {
return nil, err
}
for _, pluginFile := range pluginFiles {
if !pluginFile.IsDir() {
continue
}
pluginList = append(pluginList, pluginFile.Name())
}
return pluginList, nil
}

func ExtToPluginLang(ext string) (PluginLang, error) {
for _, pluginLang := range pluginLangs {
for _, registeredExt := range pluginLang.GetExts() {
if registeredExt == ext {
return pluginLang, nil
}
}
}
return nil, errors.New("No plugin lang handlers are capable of handling extension '" + ext + "'")
}
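For reference, the PluginLang-style interface above (GetName, GetExts, Init, AddPlugin) is what the extension dispatch in ExtToPluginLang keys off. Below is a minimal, self-contained sketch of that dispatch pattern; the GoPluginLang type, its extension list, and the file name are illustrative stand-ins rather than the real Gosora types.

package main

import (
    "errors"
    "fmt"
    "path/filepath"
)

// Hypothetical stand-in for the PluginLang interface shown above.
type PluginLang interface {
    GetName() string
    GetExts() []string
}

// GoPluginLang is an illustrative handler for .so plugins, not the real implementation.
type GoPluginLang struct{}

func (l GoPluginLang) GetName() string   { return "Go" }
func (l GoPluginLang) GetExts() []string { return []string{".so"} }

var pluginLangs = []PluginLang{GoPluginLang{}}

// extToPluginLang mirrors the extension dispatch in ExtToPluginLang above.
func extToPluginLang(ext string) (PluginLang, error) {
    for _, lang := range pluginLangs {
        for _, registeredExt := range lang.GetExts() {
            if registeredExt == ext {
                return lang, nil
            }
        }
    }
    return nil, errors.New("no plugin lang handler for extension '" + ext + "'")
}

func main() {
    lang, err := extToPluginLang(filepath.Ext("plugin_markdown.so"))
    if err != nil {
        fmt.Println(err)
        return
    }
    fmt.Println("handled by:", lang.GetName())
}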
109
common/poll.go
@ -1,109 +0,0 @@
package common

import (
    "database/sql"

    qgen "git.tuxpa.in/a/gosora/query_gen"
)

var pollStmts PollStmts

type Poll struct {
    ID          int
    ParentID    int
    ParentTable string
    Type        int  // 0: Single choice, 1: Multiple choice, 2: Multiple choice w/ points
    AntiCheat   bool // Apply various mitigations for cheating
    // GroupPower map[gid]points // The number of points a group can spend in this poll, defaults to 1

    Options      map[int]string
    Results      map[int]int  // map[optionIndex]points
    QuickOptions []PollOption // TODO: Fix up the template transpiler so we don't need to use this hack anymore
    VoteCount    int
}

// TODO: Use a transaction for this?
// TODO: Add a voters table with castAt / IP data and only populate it when poll anti-cheat is on
func (p *Poll) CastVote(optionIndex, uid int, ip string) error {
    if Config.DisablePollIP || !p.AntiCheat {
        ip = ""
    }
    _, e := pollStmts.addVote.Exec(p.ID, uid, optionIndex, ip)
    if e != nil {
        return e
    }
    _, e = pollStmts.incVoteCount.Exec(p.ID)
    if e != nil {
        return e
    }
    _, e = pollStmts.incVoteCountForOption.Exec(optionIndex, p.ID)
    return e
}

func (p *Poll) Delete() error {
    _, e := pollStmts.deletePollVotes.Exec(p.ID)
    if e != nil {
        return e
    }
    _, e = pollStmts.deletePollOptions.Exec(p.ID)
    if e != nil {
        return e
    }
    _, e = pollStmts.deletePoll.Exec(p.ID)
    _ = Polls.GetCache().Remove(p.ID)
    return e
}

func (p *Poll) Resultsf(f func(votes int) error) error {
    rows, e := pollStmts.getResults.Query(p.ID)
    if e != nil {
        return e
    }
    defer rows.Close()

    var votes int
    for rows.Next() {
        if e := rows.Scan(&votes); e != nil {
            return e
        }
        if e := f(votes); e != nil {
            return e
        }
    }
    return rows.Err()
}

func (p *Poll) Copy() Poll {
    return *p
}

type PollStmts struct {
    getResults *sql.Stmt

    addVote               *sql.Stmt
    incVoteCount          *sql.Stmt
    incVoteCountForOption *sql.Stmt

    deletePoll        *sql.Stmt
    deletePollOptions *sql.Stmt
    deletePollVotes   *sql.Stmt
}

func init() {
    DbInits.Add(func(acc *qgen.Accumulator) error {
        p := "polls"
        wh := "pollID=?"
        pollStmts = PollStmts{
            getResults: acc.Select("polls_options").Columns("votes").Where("pollID=?").Orderby("option ASC").Prepare(),

            addVote:               acc.Insert("polls_votes").Columns("pollID,uid,option,castAt,ip").Fields("?,?,?,UTC_TIMESTAMP(),?").Prepare(),
            incVoteCount:          acc.Update(p).Set("votes=votes+1").Where(wh).Prepare(),
            incVoteCountForOption: acc.Update("polls_options").Set("votes=votes+1").Where("option=? AND pollID=?").Prepare(),

            deletePoll:        acc.Delete(p).Where(wh).Prepare(),
            deletePollOptions: acc.Delete("polls_options").Where(wh).Prepare(),
            deletePollVotes:   acc.Delete("polls_votes").Where(wh).Prepare(),
        }
        return acc.FirstError()
    })
}
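CastVote above blanks the voter's IP unless anti-cheat is enabled and IP logging hasn't been disabled, then records the vote and bumps the two counters. A rough standalone sketch of just that IP-scrubbing decision, with a placeholder config struct standing in for the real Config:

package main

import "fmt"

// Placeholder config; the real flag lives on Config in the common package.
type config struct {
    DisablePollIP bool
}

var cfg = config{DisablePollIP: false}

// scrubIP mirrors the guard at the top of Poll.CastVote above:
// the IP is only retained when anti-cheat is on and IP logging isn't disabled.
func scrubIP(ip string, antiCheat bool) string {
    if cfg.DisablePollIP || !antiCheat {
        return ""
    }
    return ip
}

func main() {
    fmt.Println(scrubIP("198.51.100.7", false)) // "" - anti-cheat off, IP discarded
    fmt.Println(scrubIP("198.51.100.7", true))  // kept for the voters table
}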
@ -1,165 +1,164 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
)
|
||||
|
||||
// PollCache is an interface which spits out polls from a fast cache rather than the database, whether from memory or from an application like Redis. Polls may not be present in the cache but may be in the database
|
||||
type PollCache interface {
|
||||
Get(id int) (*Poll, error)
|
||||
GetUnsafe(id int) (*Poll, error)
|
||||
BulkGet(ids []int) (list []*Poll)
|
||||
Set(item *Poll) error
|
||||
Add(item *Poll) error
|
||||
AddUnsafe(item *Poll) error
|
||||
Remove(id int) error
|
||||
RemoveUnsafe(id int) error
|
||||
Flush()
|
||||
Length() int
|
||||
SetCapacity(capacity int)
|
||||
GetCapacity() int
|
||||
Get(id int) (*Poll, error)
|
||||
GetUnsafe(id int) (*Poll, error)
|
||||
BulkGet(ids []int) (list []*Poll)
|
||||
Set(item *Poll) error
|
||||
Add(item *Poll) error
|
||||
AddUnsafe(item *Poll) error
|
||||
Remove(id int) error
|
||||
RemoveUnsafe(id int) error
|
||||
Flush()
|
||||
Length() int
|
||||
SetCapacity(capacity int)
|
||||
GetCapacity() int
|
||||
}
|
||||
|
||||
// MemoryPollCache stores and pulls polls out of the current process' memory
|
||||
type MemoryPollCache struct {
|
||||
items map[int]*Poll
|
||||
length int64
|
||||
capacity int
|
||||
items map[int]*Poll
|
||||
length int64
|
||||
capacity int
|
||||
|
||||
sync.RWMutex
|
||||
sync.RWMutex
|
||||
}
|
||||
|
||||
// NewMemoryPollCache gives you a new instance of MemoryPollCache
|
||||
func NewMemoryPollCache(capacity int) *MemoryPollCache {
|
||||
return &MemoryPollCache{
|
||||
items: make(map[int]*Poll),
|
||||
capacity: capacity,
|
||||
}
|
||||
return &MemoryPollCache{
|
||||
items: make(map[int]*Poll),
|
||||
capacity: capacity,
|
||||
}
|
||||
}
|
||||
|
||||
// Get fetches a poll by ID. Returns ErrNoRows if not present.
|
||||
func (s *MemoryPollCache) Get(id int) (*Poll, error) {
|
||||
s.RLock()
|
||||
item, ok := s.items[id]
|
||||
s.RUnlock()
|
||||
if ok {
|
||||
return item, nil
|
||||
}
|
||||
return item, ErrNoRows
|
||||
func (mus *MemoryPollCache) Get(id int) (*Poll, error) {
|
||||
mus.RLock()
|
||||
item, ok := mus.items[id]
|
||||
mus.RUnlock()
|
||||
if ok {
|
||||
return item, nil
|
||||
}
|
||||
return item, ErrNoRows
|
||||
}
|
||||
|
||||
// BulkGet fetches multiple polls by their IDs. Indices without polls will be set to nil, so make sure you check for those, we might want to change this behaviour to make it less confusing.
|
||||
func (s *MemoryPollCache) BulkGet(ids []int) (list []*Poll) {
|
||||
list = make([]*Poll, len(ids))
|
||||
s.RLock()
|
||||
for i, id := range ids {
|
||||
list[i] = s.items[id]
|
||||
}
|
||||
s.RUnlock()
|
||||
return list
|
||||
func (mus *MemoryPollCache) BulkGet(ids []int) (list []*Poll) {
|
||||
list = make([]*Poll, len(ids))
|
||||
mus.RLock()
|
||||
for i, id := range ids {
|
||||
list[i] = mus.items[id]
|
||||
}
|
||||
mus.RUnlock()
|
||||
return list
|
||||
}
|
||||
|
||||
// GetUnsafe fetches a poll by ID. Returns ErrNoRows if not present. THIS METHOD IS NOT THREAD-SAFE.
|
||||
func (s *MemoryPollCache) GetUnsafe(id int) (*Poll, error) {
|
||||
item, ok := s.items[id]
|
||||
if ok {
|
||||
return item, nil
|
||||
}
|
||||
return item, ErrNoRows
|
||||
func (mus *MemoryPollCache) GetUnsafe(id int) (*Poll, error) {
|
||||
item, ok := mus.items[id]
|
||||
if ok {
|
||||
return item, nil
|
||||
}
|
||||
return item, ErrNoRows
|
||||
}
|
||||
|
||||
// Set overwrites the value of a poll in the cache, whether it's present or not. May return a capacity overflow error.
|
||||
func (s *MemoryPollCache) Set(item *Poll) error {
|
||||
s.Lock()
|
||||
user, ok := s.items[item.ID]
|
||||
if ok {
|
||||
s.Unlock()
|
||||
*user = *item
|
||||
} else if int(s.length) >= s.capacity {
|
||||
s.Unlock()
|
||||
return ErrStoreCapacityOverflow
|
||||
} else {
|
||||
s.items[item.ID] = item
|
||||
s.Unlock()
|
||||
atomic.AddInt64(&s.length, 1)
|
||||
}
|
||||
return nil
|
||||
func (mus *MemoryPollCache) Set(item *Poll) error {
|
||||
mus.Lock()
|
||||
user, ok := mus.items[item.ID]
|
||||
if ok {
|
||||
mus.Unlock()
|
||||
*user = *item
|
||||
} else if int(mus.length) >= mus.capacity {
|
||||
mus.Unlock()
|
||||
return ErrStoreCapacityOverflow
|
||||
} else {
|
||||
mus.items[item.ID] = item
|
||||
mus.Unlock()
|
||||
atomic.AddInt64(&mus.length, 1)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Add adds a poll to the cache, similar to Set, but it's only intended for new items. This method might be deprecated in the near future, use Set. May return a capacity overflow error.
|
||||
// ? Is this redundant if we have Set? Are the efficiency wins worth this? Is this even used?
|
||||
func (s *MemoryPollCache) Add(item *Poll) error {
|
||||
s.Lock()
|
||||
if int(s.length) >= s.capacity {
|
||||
s.Unlock()
|
||||
return ErrStoreCapacityOverflow
|
||||
}
|
||||
s.items[item.ID] = item
|
||||
s.length = int64(len(s.items))
|
||||
s.Unlock()
|
||||
return nil
|
||||
func (mus *MemoryPollCache) Add(item *Poll) error {
|
||||
mus.Lock()
|
||||
if int(mus.length) >= mus.capacity {
|
||||
mus.Unlock()
|
||||
return ErrStoreCapacityOverflow
|
||||
}
|
||||
mus.items[item.ID] = item
|
||||
mus.length = int64(len(mus.items))
|
||||
mus.Unlock()
|
||||
return nil
|
||||
}
|
||||
|
||||
// AddUnsafe is the unsafe version of Add. May return a capacity overflow error. THIS METHOD IS NOT THREAD-SAFE.
|
||||
func (s *MemoryPollCache) AddUnsafe(item *Poll) error {
|
||||
if int(s.length) >= s.capacity {
|
||||
return ErrStoreCapacityOverflow
|
||||
}
|
||||
s.items[item.ID] = item
|
||||
s.length = int64(len(s.items))
|
||||
return nil
|
||||
func (mus *MemoryPollCache) AddUnsafe(item *Poll) error {
|
||||
if int(mus.length) >= mus.capacity {
|
||||
return ErrStoreCapacityOverflow
|
||||
}
|
||||
mus.items[item.ID] = item
|
||||
mus.length = int64(len(mus.items))
|
||||
return nil
|
||||
}
|
||||
|
||||
// Remove removes a poll from the cache by ID, if they exist. Returns ErrNoRows if no items exist.
|
||||
func (s *MemoryPollCache) Remove(id int) error {
|
||||
s.Lock()
|
||||
_, ok := s.items[id]
|
||||
if !ok {
|
||||
s.Unlock()
|
||||
return ErrNoRows
|
||||
}
|
||||
delete(s.items, id)
|
||||
s.Unlock()
|
||||
atomic.AddInt64(&s.length, -1)
|
||||
return nil
|
||||
func (mus *MemoryPollCache) Remove(id int) error {
|
||||
mus.Lock()
|
||||
_, ok := mus.items[id]
|
||||
if !ok {
|
||||
mus.Unlock()
|
||||
return ErrNoRows
|
||||
}
|
||||
delete(mus.items, id)
|
||||
mus.Unlock()
|
||||
atomic.AddInt64(&mus.length, -1)
|
||||
return nil
|
||||
}
|
||||
|
||||
// RemoveUnsafe is the unsafe version of Remove. THIS METHOD IS NOT THREAD-SAFE.
|
||||
func (s *MemoryPollCache) RemoveUnsafe(id int) error {
|
||||
_, ok := s.items[id]
|
||||
if !ok {
|
||||
return ErrNoRows
|
||||
}
|
||||
delete(s.items, id)
|
||||
atomic.AddInt64(&s.length, -1)
|
||||
return nil
|
||||
func (mus *MemoryPollCache) RemoveUnsafe(id int) error {
|
||||
_, ok := mus.items[id]
|
||||
if !ok {
|
||||
return ErrNoRows
|
||||
}
|
||||
delete(mus.items, id)
|
||||
atomic.AddInt64(&mus.length, -1)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Flush removes all the polls from the cache, useful for tests.
|
||||
func (s *MemoryPollCache) Flush() {
|
||||
m := make(map[int]*Poll)
|
||||
s.Lock()
|
||||
s.items = m
|
||||
s.length = 0
|
||||
s.Unlock()
|
||||
func (mus *MemoryPollCache) Flush() {
|
||||
mus.Lock()
|
||||
mus.items = make(map[int]*Poll)
|
||||
mus.length = 0
|
||||
mus.Unlock()
|
||||
}
|
||||
|
||||
// ! Is this concurrent?
|
||||
// Length returns the number of polls in the memory cache
|
||||
func (s *MemoryPollCache) Length() int {
|
||||
return int(s.length)
|
||||
func (mus *MemoryPollCache) Length() int {
|
||||
return int(mus.length)
|
||||
}
|
||||
|
||||
// SetCapacity sets the maximum number of polls which this cache can hold
|
||||
func (s *MemoryPollCache) SetCapacity(capacity int) {
|
||||
// Ints are moved in a single instruction, so this should be thread-safe
|
||||
s.capacity = capacity
|
||||
func (mus *MemoryPollCache) SetCapacity(capacity int) {
|
||||
// Ints are moved in a single instruction, so this should be thread-safe
|
||||
mus.capacity = capacity
|
||||
}
|
||||
|
||||
// GetCapacity returns the maximum number of polls this cache can hold
|
||||
func (s *MemoryPollCache) GetCapacity() int {
|
||||
return s.capacity
|
||||
func (mus *MemoryPollCache) GetCapacity() int {
|
||||
return mus.capacity
|
||||
}
|
||||
|
||||
// NullPollCache is a poll cache to be used when you don't want a cache and just want queries to passthrough to the database
|
||||
@ -168,41 +167,41 @@ type NullPollCache struct {
|
||||
|
||||
// NewNullPollCache gives you a new instance of NullPollCache
|
||||
func NewNullPollCache() *NullPollCache {
|
||||
return &NullPollCache{}
|
||||
return &NullPollCache{}
|
||||
}
|
||||
|
||||
// nolint
|
||||
func (s *NullPollCache) Get(id int) (*Poll, error) {
|
||||
return nil, ErrNoRows
|
||||
func (mus *NullPollCache) Get(id int) (*Poll, error) {
|
||||
return nil, ErrNoRows
|
||||
}
|
||||
func (s *NullPollCache) BulkGet(ids []int) (list []*Poll) {
|
||||
return make([]*Poll, len(ids))
|
||||
func (mus *NullPollCache) BulkGet(ids []int) (list []*Poll) {
|
||||
return make([]*Poll, len(ids))
|
||||
}
|
||||
func (s *NullPollCache) GetUnsafe(id int) (*Poll, error) {
|
||||
return nil, ErrNoRows
|
||||
func (mus *NullPollCache) GetUnsafe(id int) (*Poll, error) {
|
||||
return nil, ErrNoRows
|
||||
}
|
||||
func (s *NullPollCache) Set(_ *Poll) error {
|
||||
return nil
|
||||
func (mus *NullPollCache) Set(_ *Poll) error {
|
||||
return nil
|
||||
}
|
||||
func (s *NullPollCache) Add(_ *Poll) error {
|
||||
return nil
|
||||
func (mus *NullPollCache) Add(_ *Poll) error {
|
||||
return nil
|
||||
}
|
||||
func (s *NullPollCache) AddUnsafe(_ *Poll) error {
|
||||
return nil
|
||||
func (mus *NullPollCache) AddUnsafe(_ *Poll) error {
|
||||
return nil
|
||||
}
|
||||
func (s *NullPollCache) Remove(id int) error {
|
||||
return nil
|
||||
func (mus *NullPollCache) Remove(id int) error {
|
||||
return nil
|
||||
}
|
||||
func (s *NullPollCache) RemoveUnsafe(id int) error {
|
||||
return nil
|
||||
func (mus *NullPollCache) RemoveUnsafe(id int) error {
|
||||
return nil
|
||||
}
|
||||
func (s *NullPollCache) Flush() {
|
||||
func (mus *NullPollCache) Flush() {
|
||||
}
|
||||
func (s *NullPollCache) Length() int {
|
||||
return 0
|
||||
func (mus *NullPollCache) Length() int {
|
||||
return 0
|
||||
}
|
||||
func (s *NullPollCache) SetCapacity(_ int) {
|
||||
func (mus *NullPollCache) SetCapacity(_ int) {
|
||||
}
|
||||
func (s *NullPollCache) GetCapacity() int {
|
||||
return 0
|
||||
func (mus *NullPollCache) GetCapacity() int {
|
||||
return 0
|
||||
}
|
||||
|
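Both MemoryPollCache variants above follow the same pattern: an RWMutex-guarded map with an atomically tracked length and a hard capacity that returns ErrStoreCapacityOverflow instead of evicting. A condensed sketch of that pattern with generic names, not the actual Gosora types:

package main

import (
    "errors"
    "fmt"
    "sync"
    "sync/atomic"
)

var errCapacityOverflow = errors.New("cache is at capacity")

type item struct{ ID int }

// boundedCache condenses the MemoryPollCache pattern: a mutex-guarded map,
// an atomic length counter, and a fixed capacity with no eviction.
type boundedCache struct {
    sync.RWMutex
    items    map[int]*item
    length   int64
    capacity int
}

func newBoundedCache(capacity int) *boundedCache {
    return &boundedCache{items: make(map[int]*item), capacity: capacity}
}

func (c *boundedCache) Set(it *item) error {
    c.Lock()
    _, existed := c.items[it.ID]
    if !existed && int(c.length) >= c.capacity {
        c.Unlock()
        return errCapacityOverflow
    }
    c.items[it.ID] = it
    c.Unlock()
    if !existed {
        atomic.AddInt64(&c.length, 1)
    }
    return nil
}

func main() {
    c := newBoundedCache(1)
    fmt.Println(c.Set(&item{ID: 1})) // <nil>
    fmt.Println(c.Set(&item{ID: 2})) // cache is at capacity
}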
@ -7,243 +7,281 @@ import (
|
||||
"log"
|
||||
"strconv"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"../query_gen/lib"
|
||||
)
|
||||
|
||||
var Polls PollStore
|
||||
|
||||
type Poll struct {
|
||||
ID int
|
||||
ParentID int
|
||||
ParentTable string
|
||||
Type int // 0: Single choice, 1: Multiple choice, 2: Multiple choice w/ points
|
||||
//AntiCheat bool // Apply various mitigations for cheating
|
||||
// GroupPower map[gid]points // The number of points a group can spend in this poll, defaults to 1
|
||||
|
||||
Options map[int]string
|
||||
Results map[int]int // map[optionIndex]points
|
||||
QuickOptions []PollOption // TODO: Fix up the template transpiler so we don't need to use this hack anymore
|
||||
VoteCount int
|
||||
}
|
||||
|
||||
func (poll *Poll) CastVote(optionIndex int, uid int, ipaddress string) error {
|
||||
return Polls.CastVote(optionIndex, poll.ID, uid, ipaddress) // TODO: Move the query into a pollStmts rather than having it in the store
|
||||
}
|
||||
|
||||
func (poll *Poll) Copy() Poll {
|
||||
return *poll
|
||||
}
|
||||
|
||||
type PollOption struct {
|
||||
ID int
|
||||
Value string
|
||||
ID int
|
||||
Value string
|
||||
}
|
||||
|
||||
type Pollable interface {
|
||||
GetID() int
|
||||
GetTable() string
|
||||
SetPoll(pollID int) error
|
||||
GetID() int
|
||||
GetTable() string
|
||||
SetPoll(pollID int) error
|
||||
}
|
||||
|
||||
type PollStore interface {
|
||||
Get(id int) (*Poll, error)
|
||||
Exists(id int) bool
|
||||
ClearIPs() error
|
||||
Create(parent Pollable, pollType int, pollOptions map[int]string) (int, error)
|
||||
Reload(id int) error
|
||||
Count() int
|
||||
Get(id int) (*Poll, error)
|
||||
Exists(id int) bool
|
||||
Create(parent Pollable, pollType int, pollOptions map[int]string) (int, error)
|
||||
CastVote(optionIndex int, pollID int, uid int, ipaddress string) error
|
||||
Reload(id int) error
|
||||
//GlobalCount() int
|
||||
|
||||
SetCache(cache PollCache)
|
||||
GetCache() PollCache
|
||||
SetCache(cache PollCache)
|
||||
GetCache() PollCache
|
||||
}
|
||||
|
||||
type DefaultPollStore struct {
|
||||
cache PollCache
|
||||
cache PollCache
|
||||
|
||||
get *sql.Stmt
|
||||
exists *sql.Stmt
|
||||
createPoll *sql.Stmt
|
||||
createPollOption *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
count *sql.Stmt
|
||||
|
||||
clearIPs *sql.Stmt
|
||||
get *sql.Stmt
|
||||
exists *sql.Stmt
|
||||
createPoll *sql.Stmt
|
||||
createPollOption *sql.Stmt
|
||||
addVote *sql.Stmt
|
||||
incrementVoteCount *sql.Stmt
|
||||
incrementVoteCountForOption *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
//pollCount *sql.Stmt
|
||||
}
|
||||
|
||||
func NewDefaultPollStore(cache PollCache) (*DefaultPollStore, error) {
|
||||
acc := qgen.NewAcc()
|
||||
if cache == nil {
|
||||
cache = NewNullPollCache()
|
||||
}
|
||||
// TODO: Add an admin version of registerStmt with more flexibility?
|
||||
p := "polls"
|
||||
return &DefaultPollStore{
|
||||
cache: cache,
|
||||
get: acc.Select(p).Columns("parentID,parentTable,type,options,votes").Where("pollID=?").Stmt(),
|
||||
exists: acc.Select(p).Columns("pollID").Where("pollID=?").Stmt(),
|
||||
createPoll: acc.Insert(p).Columns("parentID,parentTable,type,options").Fields("?,?,?,?").Prepare(),
|
||||
createPollOption: acc.Insert("polls_options").Columns("pollID,option,votes").Fields("?,?,0").Prepare(),
|
||||
count: acc.Count(p).Prepare(),
|
||||
|
||||
clearIPs: acc.Update("polls_votes").Set("ip=''").Where("ip!=''").Stmt(),
|
||||
}, acc.FirstError()
|
||||
acc := qgen.NewAcc()
|
||||
if cache == nil {
|
||||
cache = NewNullPollCache()
|
||||
}
|
||||
// TODO: Add an admin version of registerStmt with more flexibility?
|
||||
return &DefaultPollStore{
|
||||
cache: cache,
|
||||
get: acc.Select("polls").Columns("parentID, parentTable, type, options, votes").Where("pollID = ?").Prepare(),
|
||||
exists: acc.Select("polls").Columns("pollID").Where("pollID = ?").Prepare(),
|
||||
createPoll: acc.Insert("polls").Columns("parentID, parentTable, type, options").Fields("?,?,?,?").Prepare(),
|
||||
createPollOption: acc.Insert("polls_options").Columns("pollID, option, votes").Fields("?,?,0").Prepare(),
|
||||
addVote: acc.Insert("polls_votes").Columns("pollID, uid, option, castAt, ipaddress").Fields("?,?,?,UTC_TIMESTAMP(),?").Prepare(),
|
||||
incrementVoteCount: acc.Update("polls").Set("votes = votes + 1").Where("pollID = ?").Prepare(),
|
||||
incrementVoteCountForOption: acc.Update("polls_options").Set("votes = votes + 1").Where("option = ? AND pollID = ?").Prepare(),
|
||||
//pollCount: acc.SimpleCount("polls", "", ""),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
func (s *DefaultPollStore) Exists(id int) bool {
|
||||
e := s.exists.QueryRow(id).Scan(&id)
|
||||
if e != nil && e != ErrNoRows {
|
||||
LogError(e)
|
||||
}
|
||||
return e != ErrNoRows
|
||||
func (store *DefaultPollStore) Exists(id int) bool {
|
||||
err := store.exists.QueryRow(id).Scan(&id)
|
||||
if err != nil && err != ErrNoRows {
|
||||
LogError(err)
|
||||
}
|
||||
return err != ErrNoRows
|
||||
}
|
||||
|
||||
func (s *DefaultPollStore) Get(id int) (*Poll, error) {
|
||||
p, err := s.cache.Get(id)
|
||||
if err == nil {
|
||||
return p, nil
|
||||
}
|
||||
func (store *DefaultPollStore) Get(id int) (*Poll, error) {
|
||||
poll, err := store.cache.Get(id)
|
||||
if err == nil {
|
||||
return poll, nil
|
||||
}
|
||||
|
||||
p = &Poll{ID: id}
|
||||
var optionTxt []byte
|
||||
err = s.get.QueryRow(id).Scan(&p.ParentID, &p.ParentTable, &p.Type, &optionTxt, &p.VoteCount)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
poll = &Poll{ID: id}
|
||||
var optionTxt []byte
|
||||
err = store.get.QueryRow(id).Scan(&poll.ParentID, &poll.ParentTable, &poll.Type, &optionTxt, &poll.VoteCount)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err = json.Unmarshal(optionTxt, &p.Options)
|
||||
if err == nil {
|
||||
p.QuickOptions = s.unpackOptionsMap(p.Options)
|
||||
s.cache.Set(p)
|
||||
}
|
||||
return p, err
|
||||
err = json.Unmarshal(optionTxt, &poll.Options)
|
||||
if err == nil {
|
||||
poll.QuickOptions = store.unpackOptionsMap(poll.Options)
|
||||
store.cache.Set(poll)
|
||||
}
|
||||
return poll, err
|
||||
}
|
||||
|
||||
// TODO: Optimise the query to avoid preparing it on the spot? Maybe, use knowledge of the most common IN() parameter counts?
|
||||
// TODO: ID of 0 should always error?
|
||||
func (s *DefaultPollStore) BulkGetMap(ids []int) (list map[int]*Poll, err error) {
|
||||
idCount := len(ids)
|
||||
list = make(map[int]*Poll)
|
||||
if idCount == 0 {
|
||||
return list, nil
|
||||
}
|
||||
func (store *DefaultPollStore) BulkGetMap(ids []int) (list map[int]*Poll, err error) {
|
||||
var idCount = len(ids)
|
||||
list = make(map[int]*Poll)
|
||||
if idCount == 0 {
|
||||
return list, nil
|
||||
}
|
||||
|
||||
var stillHere []int
|
||||
sliceList := s.cache.BulkGet(ids)
|
||||
for i, sliceItem := range sliceList {
|
||||
if sliceItem != nil {
|
||||
list[sliceItem.ID] = sliceItem
|
||||
} else {
|
||||
stillHere = append(stillHere, ids[i])
|
||||
}
|
||||
}
|
||||
ids = stillHere
|
||||
var stillHere []int
|
||||
sliceList := store.cache.BulkGet(ids)
|
||||
for i, sliceItem := range sliceList {
|
||||
if sliceItem != nil {
|
||||
list[sliceItem.ID] = sliceItem
|
||||
} else {
|
||||
stillHere = append(stillHere, ids[i])
|
||||
}
|
||||
}
|
||||
ids = stillHere
|
||||
|
||||
// If every user is in the cache, then return immediately
|
||||
if len(ids) == 0 {
|
||||
return list, nil
|
||||
}
|
||||
// If every user is in the cache, then return immediately
|
||||
if len(ids) == 0 {
|
||||
return list, nil
|
||||
}
|
||||
|
||||
idList, q := inqbuild(ids)
|
||||
rows, err := qgen.NewAcc().Select("polls").Columns("pollID,parentID,parentTable,type,options,votes").Where("pollID IN(" + q + ")").Query(idList...)
|
||||
if err != nil {
|
||||
return list, err
|
||||
}
|
||||
// TODO: Add a function for the qlist stuff
|
||||
var qlist string
|
||||
var pollIDList []interface{}
|
||||
for _, id := range ids {
|
||||
pollIDList = append(pollIDList, strconv.Itoa(id))
|
||||
qlist += "?,"
|
||||
}
|
||||
qlist = qlist[0 : len(qlist)-1]
|
||||
|
||||
for rows.Next() {
|
||||
p := &Poll{ID: 0}
|
||||
var optionTxt []byte
|
||||
err := rows.Scan(&p.ID, &p.ParentID, &p.ParentTable, &p.Type, &optionTxt, &p.VoteCount)
|
||||
if err != nil {
|
||||
return list, err
|
||||
}
|
||||
rows, err := qgen.NewAcc().Select("polls").Columns("pollID, parentID, parentTable, type, options, votes").Where("pollID IN(" + qlist + ")").Query(pollIDList...)
|
||||
if err != nil {
|
||||
return list, err
|
||||
}
|
||||
|
||||
err = json.Unmarshal(optionTxt, &p.Options)
|
||||
if err != nil {
|
||||
return list, err
|
||||
}
|
||||
p.QuickOptions = s.unpackOptionsMap(p.Options)
|
||||
s.cache.Set(p)
|
||||
for rows.Next() {
|
||||
poll := &Poll{ID: 0}
|
||||
var optionTxt []byte
|
||||
err := rows.Scan(&poll.ID, &poll.ParentID, &poll.ParentTable, &poll.Type, &optionTxt, &poll.VoteCount)
|
||||
if err != nil {
|
||||
return list, err
|
||||
}
|
||||
|
||||
list[p.ID] = p
|
||||
}
|
||||
err = json.Unmarshal(optionTxt, &poll.Options)
|
||||
if err != nil {
|
||||
return list, err
|
||||
}
|
||||
poll.QuickOptions = store.unpackOptionsMap(poll.Options)
|
||||
store.cache.Set(poll)
|
||||
|
||||
// Did we miss any polls?
|
||||
if idCount > len(list) {
|
||||
var sidList string
|
||||
for _, id := range ids {
|
||||
if _, ok := list[id]; !ok {
|
||||
sidList += strconv.Itoa(id) + ","
|
||||
}
|
||||
}
|
||||
list[poll.ID] = poll
|
||||
}
|
||||
|
||||
// We probably don't need this, but it might be useful in case of bugs in BulkCascadeGetMap
|
||||
if sidList == "" {
|
||||
// TODO: Bulk log this
|
||||
if Dev.DebugMode {
|
||||
log.Print("This data is sampled later in the BulkCascadeGetMap function, so it might miss the cached IDs")
|
||||
log.Print("idCount", idCount)
|
||||
log.Print("ids", ids)
|
||||
log.Print("list", list)
|
||||
}
|
||||
return list, errors.New("We weren't able to find a poll, but we don't know which one")
|
||||
}
|
||||
sidList = sidList[0 : len(sidList)-1]
|
||||
// Did we miss any polls?
|
||||
if idCount > len(list) {
|
||||
var sidList string
|
||||
for _, id := range ids {
|
||||
_, ok := list[id]
|
||||
if !ok {
|
||||
sidList += strconv.Itoa(id) + ","
|
||||
}
|
||||
}
|
||||
|
||||
err = errors.New("Unable to find the polls with the following IDs: " + sidList)
|
||||
}
|
||||
// We probably don't need this, but it might be useful in case of bugs in BulkCascadeGetMap
|
||||
if sidList == "" {
|
||||
// TODO: Bulk log this
|
||||
if Dev.DebugMode {
|
||||
log.Print("This data is sampled later in the BulkCascadeGetMap function, so it might miss the cached IDs")
|
||||
log.Print("idCount", idCount)
|
||||
log.Print("ids", ids)
|
||||
log.Print("list", list)
|
||||
}
|
||||
return list, errors.New("We weren't able to find a poll, but we don't know which one")
|
||||
}
|
||||
sidList = sidList[0 : len(sidList)-1]
|
||||
|
||||
return list, err
|
||||
err = errors.New("Unable to find the polls with the following IDs: " + sidList)
|
||||
}
|
||||
|
||||
return list, err
|
||||
}
|
||||
|
||||
func (s *DefaultPollStore) Reload(id int) error {
|
||||
p := &Poll{ID: id}
|
||||
var optionTxt []byte
|
||||
e := s.get.QueryRow(id).Scan(&p.ParentID, &p.ParentTable, &p.Type, &optionTxt, &p.VoteCount)
|
||||
if e != nil {
|
||||
_ = s.cache.Remove(id)
|
||||
return e
|
||||
}
|
||||
e = json.Unmarshal(optionTxt, &p.Options)
|
||||
if e != nil {
|
||||
_ = s.cache.Remove(id)
|
||||
return e
|
||||
}
|
||||
p.QuickOptions = s.unpackOptionsMap(p.Options)
|
||||
_ = s.cache.Set(p)
|
||||
return nil
|
||||
func (store *DefaultPollStore) Reload(id int) error {
|
||||
poll := &Poll{ID: id}
|
||||
var optionTxt []byte
|
||||
err := store.get.QueryRow(id).Scan(&poll.ParentID, &poll.ParentTable, &poll.Type, &optionTxt, &poll.VoteCount)
|
||||
if err != nil {
|
||||
store.cache.Remove(id)
|
||||
return err
|
||||
}
|
||||
|
||||
err = json.Unmarshal(optionTxt, &poll.Options)
|
||||
if err != nil {
|
||||
store.cache.Remove(id)
|
||||
return err
|
||||
}
|
||||
|
||||
poll.QuickOptions = store.unpackOptionsMap(poll.Options)
|
||||
_ = store.cache.Set(poll)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *DefaultPollStore) unpackOptionsMap(rawOptions map[int]string) []PollOption {
|
||||
opts := make([]PollOption, len(rawOptions))
|
||||
for id, opt := range rawOptions {
|
||||
opts[id] = PollOption{id, opt}
|
||||
}
|
||||
return opts
|
||||
func (store *DefaultPollStore) unpackOptionsMap(rawOptions map[int]string) []PollOption {
|
||||
options := make([]PollOption, len(rawOptions))
|
||||
for id, option := range rawOptions {
|
||||
options[id] = PollOption{id, option}
|
||||
}
|
||||
return options
|
||||
}
|
||||
|
||||
func (s *DefaultPollStore) ClearIPs() error {
|
||||
_, e := s.clearIPs.Exec()
|
||||
return e
|
||||
// TODO: Use a transaction for this?
|
||||
func (store *DefaultPollStore) CastVote(optionIndex int, pollID int, uid int, ipaddress string) error {
|
||||
_, err := store.addVote.Exec(pollID, uid, optionIndex, ipaddress)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = store.incrementVoteCount.Exec(pollID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = store.incrementVoteCountForOption.Exec(optionIndex, pollID)
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO: Use a transaction for this
|
||||
func (s *DefaultPollStore) Create(parent Pollable, pollType int, pollOptions map[int]string) (id int, e error) {
|
||||
// TODO: Move the option names into the polls_options table and get rid of this json sludge?
|
||||
pollOptionsTxt, e := json.Marshal(pollOptions)
|
||||
if e != nil {
|
||||
return 0, e
|
||||
}
|
||||
res, e := s.createPoll.Exec(parent.GetID(), parent.GetTable(), pollType, pollOptionsTxt)
|
||||
if e != nil {
|
||||
return 0, e
|
||||
}
|
||||
lastID, e := res.LastInsertId()
|
||||
if e != nil {
|
||||
return 0, e
|
||||
}
|
||||
func (store *DefaultPollStore) Create(parent Pollable, pollType int, pollOptions map[int]string) (id int, err error) {
|
||||
pollOptionsTxt, err := json.Marshal(pollOptions)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
for i := 0; i < len(pollOptions); i++ {
|
||||
_, e := s.createPollOption.Exec(lastID, i)
|
||||
if e != nil {
|
||||
return 0, e
|
||||
}
|
||||
}
|
||||
res, err := store.createPoll.Exec(parent.GetID(), parent.GetTable(), pollType, pollOptionsTxt)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
id = int(lastID)
|
||||
return id, parent.SetPoll(id) // TODO: Delete the poll (and options) if SetPoll fails
|
||||
lastID, err := res.LastInsertId()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
for i := 0; i < len(pollOptions); i++ {
|
||||
_, err := store.createPollOption.Exec(lastID, i)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
}
|
||||
return int(lastID), parent.SetPoll(int(lastID)) // TODO: Delete the poll (and options) if SetPoll fails
|
||||
}
|
||||
|
||||
func (s *DefaultPollStore) Count() int {
|
||||
return Count(s.count)
|
||||
}
|
||||
|
||||
func (s *DefaultPollStore) SetCache(cache PollCache) {
|
||||
s.cache = cache
|
||||
func (store *DefaultPollStore) SetCache(cache PollCache) {
|
||||
store.cache = cache
|
||||
}
|
||||
|
||||
// TODO: We're temporarily doing this so that you can do ucache != nil in getTopicUser. Refactor it.
|
||||
func (s *DefaultPollStore) GetCache() PollCache {
|
||||
_, ok := s.cache.(*NullPollCache)
|
||||
if ok {
|
||||
return nil
|
||||
}
|
||||
return s.cache
|
||||
func (store *DefaultPollStore) GetCache() PollCache {
|
||||
_, ok := store.cache.(*NullPollCache)
|
||||
if ok {
|
||||
return nil
|
||||
}
|
||||
return store.cache
|
||||
}
|
||||
|
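BulkGetMap above serves what it can from the cache, then fetches the remaining IDs with a single pollID IN(...) query; the master side delegates the placeholder building to an inqbuild helper while the conversations side builds the ?,?,? list inline. A small standalone sketch of that placeholder construction, with illustrative names:

package main

import (
    "fmt"
    "strings"
)

// buildInArgs turns a list of IDs into the argument slice and the "?,?,?"
// placeholder string used for an IN(...) clause, as BulkGetMap does above.
func buildInArgs(ids []int) (args []interface{}, placeholders string) {
    parts := make([]string, len(ids))
    for i, id := range ids {
        args = append(args, id)
        parts[i] = "?"
    }
    return args, strings.Join(parts, ",")
}

func main() {
    args, q := buildInArgs([]int{3, 7, 11})
    fmt.Println("SELECT ... WHERE pollID IN(" + q + ")")
    fmt.Println(args) // [3 7 11]
}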
@ -3,73 +3,61 @@ package common
|
||||
import (
|
||||
"database/sql"
|
||||
"html"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"../query_gen/lib"
|
||||
)
|
||||
|
||||
var profileReplyStmts ProfileReplyStmts
|
||||
|
||||
type ProfileReply struct {
|
||||
ID int
|
||||
ParentID int
|
||||
Content string
|
||||
CreatedBy int
|
||||
Group int
|
||||
CreatedAt time.Time
|
||||
LastEdit int
|
||||
LastEditBy int
|
||||
ContentLines int
|
||||
IP string
|
||||
ID int
|
||||
ParentID int
|
||||
Content string
|
||||
CreatedBy int
|
||||
Group int
|
||||
CreatedAt time.Time
|
||||
RelativeCreatedAt string
|
||||
LastEdit int
|
||||
LastEditBy int
|
||||
ContentLines int
|
||||
IPAddress string
|
||||
}
|
||||
|
||||
type ProfileReplyStmts struct {
|
||||
edit *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
edit *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
}
|
||||
|
||||
func init() {
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
ur := "users_replies"
|
||||
profileReplyStmts = ProfileReplyStmts{
|
||||
edit: acc.Update(ur).Set("content=?,parsed_content=?").Where("rid=?").Prepare(),
|
||||
delete: acc.Delete(ur).Where("rid=?").Prepare(),
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
profileReplyStmts = ProfileReplyStmts{
|
||||
edit: acc.Update("users_replies").Set("content = ?, parsed_content = ?").Where("rid = ?").Prepare(),
|
||||
delete: acc.Delete("users_replies").Where("rid = ?").Prepare(),
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
}
|
||||
|
||||
// Mostly for tests, so we don't wind up with out-of-date profile reply initialisation logic there
|
||||
func BlankProfileReply(id int) *ProfileReply {
|
||||
return &ProfileReply{ID: id}
|
||||
return &ProfileReply{ID: id}
|
||||
}
|
||||
|
||||
// TODO: Write tests for this
|
||||
func (r *ProfileReply) Delete() error {
|
||||
_, err := profileReplyStmts.delete.Exec(r.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// TODO: Better coupling between the two paramsextra queries
|
||||
aids, err := Activity.AidsByParamsExtra("reply", r.ParentID, "user", strconv.Itoa(r.ID))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, aid := range aids {
|
||||
DismissAlert(r.ParentID, aid)
|
||||
}
|
||||
err = Activity.DeleteByParamsExtra("reply", r.ParentID, "user", strconv.Itoa(r.ID))
|
||||
return err
|
||||
func (reply *ProfileReply) Delete() error {
|
||||
_, err := profileReplyStmts.delete.Exec(reply.ID)
|
||||
return err
|
||||
}
|
||||
|
||||
func (r *ProfileReply) SetBody(content string) error {
|
||||
content = PreparseMessage(html.UnescapeString(content))
|
||||
_, err := profileReplyStmts.edit.Exec(content, ParseMessage(content, 0, "", nil, nil), r.ID)
|
||||
return err
|
||||
func (reply *ProfileReply) SetBody(content string) error {
|
||||
content = PreparseMessage(html.UnescapeString(content))
|
||||
parsedContent := ParseMessage(content, 0, "")
|
||||
_, err := profileReplyStmts.edit.Exec(content, parsedContent, reply.ID)
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO: We can get this from the topic store instead of a query which will always miss the cache...
|
||||
func (r *ProfileReply) Creator() (*User, error) {
|
||||
return Users.Get(r.CreatedBy)
|
||||
func (reply *ProfileReply) Creator() (*User, error) {
|
||||
return Users.Get(reply.CreatedBy)
|
||||
}
|
||||
|
@ -3,79 +3,46 @@ package common
|
||||
import (
|
||||
"database/sql"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"../query_gen/lib"
|
||||
)
|
||||
|
||||
var Prstore ProfileReplyStore
|
||||
|
||||
type ProfileReplyStore interface {
|
||||
Get(id int) (*ProfileReply, error)
|
||||
Exists(id int) bool
|
||||
ClearIPs() error
|
||||
Create(profileID int, content string, createdBy int, ip string) (id int, err error)
|
||||
Count() (count int)
|
||||
Get(id int) (*ProfileReply, error)
|
||||
Create(profileID int, content string, createdBy int, ipaddress string) (id int, err error)
|
||||
}
|
||||
|
||||
// TODO: Refactor this to stop using the global stmt store
|
||||
// TODO: Add more methods to this like Create()
|
||||
type SQLProfileReplyStore struct {
|
||||
get *sql.Stmt
|
||||
exists *sql.Stmt
|
||||
create *sql.Stmt
|
||||
count *sql.Stmt
|
||||
|
||||
clearIPs *sql.Stmt
|
||||
get *sql.Stmt
|
||||
create *sql.Stmt
|
||||
}
|
||||
|
||||
func NewSQLProfileReplyStore(acc *qgen.Accumulator) (*SQLProfileReplyStore, error) {
|
||||
ur := "users_replies"
|
||||
return &SQLProfileReplyStore{
|
||||
get: acc.Select(ur).Columns("uid,content,createdBy,createdAt,lastEdit,lastEditBy,ip").Where("rid=?").Stmt(),
|
||||
exists: acc.Exists(ur, "rid").Prepare(),
|
||||
create: acc.Insert(ur).Columns("uid,content,parsed_content,createdAt,createdBy,ip").Fields("?,?,?,UTC_TIMESTAMP(),?,?").Prepare(),
|
||||
count: acc.Count(ur).Stmt(),
|
||||
|
||||
clearIPs: acc.Update(ur).Set("ip=''").Where("ip!=''").Stmt(),
|
||||
}, acc.FirstError()
|
||||
return &SQLProfileReplyStore{
|
||||
get: acc.Select("users_replies").Columns("uid, content, createdBy, createdAt, lastEdit, lastEditBy, ipaddress").Where("rid = ?").Prepare(),
|
||||
create: acc.Insert("users_replies").Columns("uid, content, parsed_content, createdAt, createdBy, ipaddress").Fields("?,?,?,UTC_TIMESTAMP(),?,?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
func (s *SQLProfileReplyStore) Get(id int) (*ProfileReply, error) {
|
||||
r := ProfileReply{ID: id}
|
||||
e := s.get.QueryRow(id).Scan(&r.ParentID, &r.Content, &r.CreatedBy, &r.CreatedAt, &r.LastEdit, &r.LastEditBy, &r.IP)
|
||||
return &r, e
|
||||
func (store *SQLProfileReplyStore) Get(id int) (*ProfileReply, error) {
|
||||
reply := ProfileReply{ID: id}
|
||||
err := store.get.QueryRow(id).Scan(&reply.ParentID, &reply.Content, &reply.CreatedBy, &reply.CreatedAt, &reply.LastEdit, &reply.LastEditBy, &reply.IPAddress)
|
||||
return &reply, err
|
||||
}
|
||||
|
||||
func (s *SQLProfileReplyStore) Exists(id int) bool {
|
||||
e := s.exists.QueryRow(id).Scan(&id)
|
||||
if e != nil && e != ErrNoRows {
|
||||
LogError(e)
|
||||
}
|
||||
return e != ErrNoRows
|
||||
}
|
||||
func (store *SQLProfileReplyStore) Create(profileID int, content string, createdBy int, ipaddress string) (id int, err error) {
|
||||
res, err := store.create.Exec(profileID, content, ParseMessage(content, 0, ""), createdBy, ipaddress)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
lastID, err := res.LastInsertId()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
func (s *SQLProfileReplyStore) ClearIPs() error {
|
||||
_, e := s.clearIPs.Exec()
|
||||
return e
|
||||
}
|
||||
|
||||
func (s *SQLProfileReplyStore) Create(profileID int, content string, createdBy int, ip string) (id int, e error) {
|
||||
if Config.DisablePostIP {
|
||||
ip = ""
|
||||
}
|
||||
res, e := s.create.Exec(profileID, content, ParseMessage(content, 0, "", nil, nil), createdBy, ip)
|
||||
if e != nil {
|
||||
return 0, e
|
||||
}
|
||||
lastID, e := res.LastInsertId()
|
||||
if e != nil {
|
||||
return 0, e
|
||||
}
|
||||
// Should we reload the user?
|
||||
return int(lastID), e
|
||||
}
|
||||
|
||||
// TODO: Write a test for this
|
||||
// Count returns the total number of topic replies on these forums
|
||||
func (s *SQLProfileReplyStore) Count() (count int) {
|
||||
return Count(s.count)
|
||||
// Should we reload the user?
|
||||
return int(lastID), err
|
||||
}
|
||||
|
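Both versions of Create above use the usual database/sql flow: execute the prepared INSERT, then read LastInsertId to return the new reply's ID. A generic sketch of that flow; the table and columns here are illustrative only, and wiring up an actual driver is omitted:

package main

import "database/sql"

// createRow shows the Exec + LastInsertId flow used by SQLProfileReplyStore.Create above.
func createRow(db *sql.DB, uid int, content string) (int, error) {
    res, err := db.Exec("INSERT INTO users_replies (uid, content) VALUES (?, ?)", uid, content)
    if err != nil {
        return 0, err
    }
    lastID, err := res.LastInsertId()
    if err != nil {
        return 0, err
    }
    return int(lastID), nil
}

func main() {}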
@ -1,132 +0,0 @@
package common

import (
    "database/sql"
    //"log"
    "time"

    qgen "git.tuxpa.in/a/gosora/query_gen"
)

var GroupPromotions GroupPromotionStore

type GroupPromotion struct {
    ID     int
    From   int
    To     int
    TwoWay bool

    Level         int
    Posts         int
    MinTime       int
    RegisteredFor int
}

type GroupPromotionStore interface {
    GetByGroup(gid int) (gps []*GroupPromotion, err error)
    Get(id int) (*GroupPromotion, error)
    PromoteIfEligible(u *User, level, posts int, registeredAt time.Time) error
    Delete(id int) error
    Create(from, to int, twoWay bool, level, posts, registeredFor int) (int, error)
}

type DefaultGroupPromotionStore struct {
    getByGroup *sql.Stmt
    get        *sql.Stmt
    delete     *sql.Stmt
    create     *sql.Stmt

    getByUser     *sql.Stmt
    getByUserMins *sql.Stmt
    updateUser    *sql.Stmt
    updateGeneric *sql.Stmt
}

func NewDefaultGroupPromotionStore(acc *qgen.Accumulator) (*DefaultGroupPromotionStore, error) {
    ugp := "users_groups_promotions"
    prs := &DefaultGroupPromotionStore{
        getByGroup: acc.Select(ugp).Columns("pid, from_gid, to_gid, two_way, level, posts, minTime, registeredFor").Where("from_gid=? OR to_gid=?").Prepare(),
        get:        acc.Select(ugp).Columns("from_gid, to_gid, two_way, level, posts, minTime, registeredFor").Where("pid=?").Prepare(),
        delete:     acc.Delete(ugp).Where("pid=?").Prepare(),
        create:     acc.Insert(ugp).Columns("from_gid, to_gid, two_way, level, posts, minTime, registeredFor").Fields("?,?,?,?,?,?,?").Prepare(),

        getByUserMins: acc.Select(ugp).Columns("pid, to_gid, two_way, level, posts, minTime, registeredFor").Where("from_gid=? AND level<=? AND posts<=? AND registeredFor<=?").Orderby("level DESC").Limit("1").Prepare(),
        getByUser:     acc.Select(ugp).Columns("pid, to_gid, two_way, level, posts, minTime, registeredFor").Where("from_gid=? AND level<=? AND posts<=?").Orderby("level DESC").Limit("1").Prepare(),
        updateUser:    acc.Update("users").Set("group=?").Where("group=? AND uid=?").Prepare(),
        updateGeneric: acc.Update("users").Set("group=?").Where("group=? AND level>=? AND posts>=?").Prepare(),
    }
    Tasks.FifteenMin.Add(prs.Tick)
    return prs, acc.FirstError()
}

func (s *DefaultGroupPromotionStore) Tick() error {
    return nil
}

func (s *DefaultGroupPromotionStore) GetByGroup(gid int) (gps []*GroupPromotion, err error) {
    rows, err := s.getByGroup.Query(gid, gid)
    if err != nil {
        return nil, err
    }
    defer rows.Close()

    for rows.Next() {
        g := &GroupPromotion{}
        err := rows.Scan(&g.ID, &g.From, &g.To, &g.TwoWay, &g.Level, &g.Posts, &g.MinTime, &g.RegisteredFor)
        if err != nil {
            return nil, err
        }
        gps = append(gps, g)
    }
    return gps, rows.Err()
}

// TODO: Cache the group promotions to avoid hitting the database as much
func (s *DefaultGroupPromotionStore) Get(id int) (*GroupPromotion, error) {
    /*g, err := s.cache.Get(id)
    if err == nil {
        return u, nil
    }*/

    g := &GroupPromotion{ID: id}
    err := s.get.QueryRow(id).Scan(&g.From, &g.To, &g.TwoWay, &g.Level, &g.Posts, &g.MinTime, &g.RegisteredFor)
    if err == nil {
        //s.cache.Set(u)
    }
    return g, err
}

// TODO: Optimise this to avoid the query
func (s *DefaultGroupPromotionStore) PromoteIfEligible(u *User, level, posts int, registeredAt time.Time) error {
    mins := time.Since(registeredAt).Minutes()
    g := &GroupPromotion{From: u.Group}
    //log.Printf("pre getByUserMins: %+v\n", u)
    err := s.getByUserMins.QueryRow(u.Group, level, posts, mins).Scan(&g.ID, &g.To, &g.TwoWay, &g.Level, &g.Posts, &g.MinTime, &g.RegisteredFor)
    if err == sql.ErrNoRows {
        //log.Print("no matches found")
        return nil
    } else if err != nil {
        return err
    }
    //log.Printf("g: %+v\n", g)
    if g.RegisteredFor == 0 {
        _, err = s.updateGeneric.Exec(g.To, g.From, g.Level, g.Posts)
    } else {
        _, err = s.updateUser.Exec(g.To, g.From, u.ID)
    }
    return err
}

func (s *DefaultGroupPromotionStore) Delete(id int) error {
    _, err := s.delete.Exec(id)
    return err
}

func (s *DefaultGroupPromotionStore) Create(from, to int, twoWay bool, level, posts, registeredFor int) (int, error) {
    res, err := s.create.Exec(from, to, twoWay, level, posts, 0, registeredFor)
    if err != nil {
        return 0, err
    }
    lastID, err := res.LastInsertId()
    return int(lastID), err
}
|
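PromoteIfEligible above converts the account's age into minutes and asks for the highest-level promotion whose level, post count, and registration-time requirements the user already meets, then applies it. A standalone sketch of that eligibility check as pure logic, without the database:

package main

import (
    "fmt"
    "time"
)

// promotion mirrors the fields the getByUserMins query above filters on.
type promotion struct {
    To            int
    Level         int
    Posts         int
    RegisteredFor int // minutes
}

// bestPromotion picks the highest-level promotion the user qualifies for, like the
// getByUserMins query (level<=?, posts<=?, registeredFor<=? ORDER BY level DESC LIMIT 1).
func bestPromotion(promos []promotion, level, posts int, registeredAt time.Time) (promotion, bool) {
    mins := int(time.Since(registeredAt).Minutes())
    best, found := promotion{}, false
    for _, p := range promos {
        if p.Level <= level && p.Posts <= posts && p.RegisteredFor <= mins {
            if !found || p.Level > best.Level {
                best, found = p, true
            }
        }
    }
    return best, found
}

func main() {
    promos := []promotion{{To: 6, Level: 5, Posts: 100, RegisteredFor: 60 * 24 * 30}}
    p, ok := bestPromotion(promos, 7, 250, time.Now().AddDate(0, -2, 0))
    fmt.Println(p, ok)
}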
@ -1,100 +0,0 @@
package common

import (
    "errors"
    "strconv"
    "sync"
    "time"
)

var ErrBadRateLimiter = errors.New("That rate limiter doesn't exist")
var ErrExceededRateLimit = errors.New("You're exceeding a rate limit. Please wait a while before trying again.")

// TODO: Persist rate limits to disk
type RateLimiter interface {
    LimitIP(limit, ip string) error
    LimitUser(limit string, user int) error
}

type RateData struct {
    value     int
    floorTime int
}

type RateFence struct {
    duration int
    max      int
}

// TODO: Optimise this by using something other than a string when possible
type RateLimit struct {
    data   map[string][]RateData
    fences []RateFence

    sync.RWMutex
}

func NewRateLimit(fences []RateFence) *RateLimit {
    for i, fence := range fences {
        fences[i].duration = fence.duration * 1000 * 1000 * 1000
    }
    return &RateLimit{data: make(map[string][]RateData), fences: fences}
}

func (l *RateLimit) Limit(name string, ltype int) error {
    l.Lock()
    defer l.Unlock()

    data, ok := l.data[name]
    if !ok {
        data = make([]RateData, len(l.fences))
        for i, _ := range data {
            data[i] = RateData{0, int(time.Now().Unix())}
        }
    }

    for i, field := range data {
        fence := l.fences[i]
        diff := int(time.Now().Unix()) - field.floorTime

        if diff >= fence.duration {
            field = RateData{0, int(time.Now().Unix())}
            data[i] = field
        }

        if field.value > fence.max {
            return ErrExceededRateLimit
        }

        field.value++
        data[i] = field
    }

    return nil
}

type DefaultRateLimiter struct {
    limits map[string]*RateLimit
}

func NewDefaultRateLimiter() *DefaultRateLimiter {
    return &DefaultRateLimiter{map[string]*RateLimit{
        "register": NewRateLimit([]RateFence{{int(time.Hour / 2), 1}}),
    }}
}

func (l *DefaultRateLimiter) LimitIP(limit, ip string) error {
    limiter, ok := l.limits[limit]
    if !ok {
        return ErrBadRateLimiter
    }
    return limiter.Limit(ip, 0)
}

func (l *DefaultRateLimiter) LimitUser(limit string, user int) error {
    limiter, ok := l.limits[limit]
    if !ok {
        return ErrBadRateLimiter
    }
    return limiter.Limit(strconv.Itoa(user), 1)
}
|
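RateLimit above is a fixed-window counter: each fence pairs a window with a maximum count, and the window resets once the floor time falls out of range. A simplified standalone sketch of a single fence; it keeps every quantity in one time unit, whereas the original scales fence durations to nanoseconds but compares them against Unix-second deltas, so treat this as the intended shape rather than a literal copy:

package main

import (
    "errors"
    "fmt"
    "time"
)

var errExceededRateLimit = errors.New("rate limit exceeded, please wait a while before trying again")

// fixedWindow is a single fence: at most max events per window.
type fixedWindow struct {
    window    time.Duration
    max       int
    count     int
    floorTime time.Time
}

// allow mirrors the per-fence logic in RateLimit.Limit above, kept in one time unit.
func (f *fixedWindow) allow() error {
    now := time.Now()
    if now.Sub(f.floorTime) >= f.window {
        f.count, f.floorTime = 0, now // window expired, start a fresh one
    }
    if f.count >= f.max {
        return errExceededRateLimit
    }
    f.count++
    return nil
}

func main() {
    f := &fixedWindow{window: 30 * time.Minute, max: 1, floorTime: time.Now()}
    fmt.Println(f.allow()) // <nil>
    fmt.Println(f.allow()) // rate limit exceeded...
}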
196
common/recalc.go
@ -1,196 +0,0 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
//"log"
|
||||
"strconv"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
)
|
||||
|
||||
var Recalc RecalcInt
|
||||
|
||||
type RecalcInt interface {
|
||||
Replies() (count int, err error)
|
||||
Forums() (count int, err error)
|
||||
Subscriptions() (count int, err error)
|
||||
ActivityStream() (count int, err error)
|
||||
Users() error
|
||||
Attachments() (count int, err error)
|
||||
}
|
||||
|
||||
type DefaultRecalc struct {
|
||||
getActivitySubscriptions *sql.Stmt
|
||||
getActivityStream *sql.Stmt
|
||||
getAttachments *sql.Stmt
|
||||
getTopicCount *sql.Stmt
|
||||
resetTopicCount *sql.Stmt
|
||||
}
|
||||
|
||||
func NewDefaultRecalc(acc *qgen.Accumulator) (*DefaultRecalc, error) {
|
||||
return &DefaultRecalc{
|
||||
getActivitySubscriptions: acc.Select("activity_subscriptions").Columns("targetID,targetType").Prepare(),
|
||||
getActivityStream: acc.Select("activity_stream").Columns("asid,event,elementID,elementType,extra").Prepare(),
|
||||
getAttachments: acc.Select("attachments").Columns("attachID,originID,originTable").Prepare(),
|
||||
getTopicCount: acc.Count("topics").Where("parentID=?").Prepare(),
|
||||
//resetTopicCount: acc.SimpleUpdateSelect("forums", "topicCount = tc", "topics", "count(*) as tc", "parentID=?", "", ""),
|
||||
// TODO: Avoid using RawPrepare
|
||||
resetTopicCount: acc.RawPrepare("UPDATE forums, (SELECT COUNT(*) as tc FROM topics WHERE parentID=?) AS src SET forums.topicCount=src.tc WHERE forums.fid=?"),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
func (s *DefaultRecalc) Replies() (count int, err error) {
|
||||
var ltid int
|
||||
err = Rstore.Each(func(r *Reply) error {
|
||||
if ltid == r.ParentID && r.ParentID > 0 {
|
||||
//return nil
|
||||
}
|
||||
if !Topics.Exists(r.ParentID) {
|
||||
// TODO: Delete in chunks not one at a time?
|
||||
if err := r.Delete(); err != nil {
|
||||
return err
|
||||
}
|
||||
count++
|
||||
}
|
||||
return nil
|
||||
})
|
||||
return count, err
|
||||
}
|
||||
|
||||
func (s *DefaultRecalc) Forums() (count int, err error) {
|
||||
err = Forums.Each(func(f *Forum) error {
|
||||
_, err := s.resetTopicCount.Exec(f.ID, f.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
count++
|
||||
return nil
|
||||
})
|
||||
return count, err
|
||||
}
|
||||
|
||||
func (s *DefaultRecalc) Subscriptions() (count int, err error) {
|
||||
err = eachall(s.getActivitySubscriptions, func(r *sql.Rows) error {
|
||||
var targetID int
|
||||
var targetType string
|
||||
err := r.Scan(&targetID, &targetType)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if targetType == "topic" {
|
||||
if !Topics.Exists(targetID) {
|
||||
// TODO: Delete in chunks not one at a time?
|
||||
err := Subscriptions.DeleteResource(targetID, targetType)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
count++
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
return count, err
|
||||
}
|
||||
|
||||
type Existable interface {
|
||||
Exists(id int) bool
|
||||
}
|
||||
|
||||
func (s *DefaultRecalc) ActivityStream() (count int, err error) {
|
||||
err = eachall(s.getActivityStream, func(r *sql.Rows) error {
|
||||
var asid, elementID int
|
||||
var event, elementType, extra string
|
||||
err := r.Scan(&asid, &event, &elementID, &elementType, &extra)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
//log.Print("asid:",asid)
|
||||
var s Existable
|
||||
switch elementType {
|
||||
case "user":
|
||||
if event == "reply" {
|
||||
extraI, _ := strconv.Atoi(extra)
|
||||
if extraI > 0 {
|
||||
s = Prstore
|
||||
elementID = extraI
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
case "topic":
|
||||
s = Topics
|
||||
// TODO: Delete reply events with an empty extra field
|
||||
if event == "reply" {
|
||||
extraI, _ := strconv.Atoi(extra)
|
||||
if extraI > 0 {
|
||||
s = Rstore
|
||||
elementID = extraI
|
||||
}
|
||||
}
|
||||
case "post":
|
||||
s = Rstore
|
||||
// TODO: Add a TopicExistsByReplyID for efficiency
|
||||
/*_, err = TopicByReplyID(elementID)
|
||||
if err == sql.ErrNoRows {
|
||||
// TODO: Delete in chunks not one at a time?
|
||||
err := Activity.Delete(asid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
count++
|
||||
} else if err != nil {
|
||||
return err
|
||||
}*/
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
if !s.Exists(elementID) {
|
||||
// TODO: Delete in chunks not one at a time?
|
||||
err := Activity.Delete(asid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
count++
|
||||
}
|
||||
return nil
|
||||
})
|
||||
return count, err
|
||||
}
|
||||
|
||||
func (s *DefaultRecalc) Users() error {
|
||||
return Users.Each(func(u *User) error {
|
||||
return u.RecalcPostStats()
|
||||
})
|
||||
}
|
||||
|
||||
func (s *DefaultRecalc) Attachments() (count int, err error) {
|
||||
err = eachall(s.getAttachments, func(r *sql.Rows) error {
|
||||
var aid, originID int
|
||||
var originType string
|
||||
err := r.Scan(&aid, &originID, &originType)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var s Existable
|
||||
switch originType {
|
||||
case "topics":
|
||||
s = Topics
|
||||
case "replies":
|
||||
s = Rstore
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
if !s.Exists(originID) {
|
||||
// TODO: Delete in chunks not one at a time?
|
||||
err := Attachments.Delete(aid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
count++
|
||||
}
|
||||
return nil
|
||||
})
|
||||
return count, err
|
||||
}
|
@ -1,143 +0,0 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
)
|
||||
|
||||
var UserBlocks BlockStore
|
||||
|
||||
//var UserFriends FriendStore
|
||||
|
||||
type BlockStore interface {
|
||||
IsBlockedBy(blocker, blockee int) (bool, error)
|
||||
BulkIsBlockedBy(blockers []int, blockee int) (bool, error)
|
||||
Add(blocker, blockee int) error
|
||||
Remove(blocker, blockee int) error
|
||||
BlockedByOffset(blocker, offset, perPage int) ([]int, error)
|
||||
BlockedByCount(blocker int) int
|
||||
}
|
||||
|
||||
type DefaultBlockStore struct {
|
||||
isBlocked *sql.Stmt
|
||||
add *sql.Stmt
|
||||
remove *sql.Stmt
|
||||
blockedBy *sql.Stmt
|
||||
blockedByCount *sql.Stmt
|
||||
}
|
||||
|
||||
func NewDefaultBlockStore(acc *qgen.Accumulator) (*DefaultBlockStore, error) {
|
||||
ub := "users_blocks"
|
||||
return &DefaultBlockStore{
|
||||
isBlocked: acc.Select(ub).Cols("blocker").Where("blocker=? AND blockedUser=?").Prepare(),
|
||||
add: acc.Insert(ub).Columns("blocker,blockedUser").Fields("?,?").Prepare(),
|
||||
remove: acc.Delete(ub).Where("blocker=? AND blockedUser=?").Prepare(),
|
||||
blockedBy: acc.Select(ub).Columns("blockedUser").Where("blocker=?").Limit("?,?").Prepare(),
|
||||
blockedByCount: acc.Count(ub).Where("blocker=?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
func (s *DefaultBlockStore) IsBlockedBy(blocker, blockee int) (bool, error) {
|
||||
e := s.isBlocked.QueryRow(blocker, blockee).Scan(&blocker)
|
||||
if e == ErrNoRows {
|
||||
return false, nil
|
||||
}
|
||||
return e == nil, e
|
||||
}
|
||||
|
||||
// TODO: Optimise the query to avoid preparing it on the spot? Maybe, use knowledge of the most common IN() parameter counts?
|
||||
func (s *DefaultBlockStore) BulkIsBlockedBy(blockers []int, blockee int) (bool, error) {
|
||||
if len(blockers) == 0 {
|
||||
return false, nil
|
||||
}
|
||||
if len(blockers) == 1 {
|
||||
return s.IsBlockedBy(blockers[0], blockee)
|
||||
}
|
||||
idList, q := inqbuild(blockers)
|
||||
count, e := qgen.NewAcc().Count("users_blocks").Where("blocker IN(" + q + ") AND blockedUser=?").TotalP(idList...)
|
||||
if e == ErrNoRows {
|
||||
return false, nil
|
||||
}
|
||||
return count == 0, e
|
||||
}
|
||||
|
||||
func (s *DefaultBlockStore) Add(blocker, blockee int) error {
|
||||
_, e := s.add.Exec(blocker, blockee)
|
||||
return e
|
||||
}
|
||||
|
||||
func (s *DefaultBlockStore) Remove(blocker, blockee int) error {
|
||||
_, e := s.remove.Exec(blocker, blockee)
|
||||
return e
|
||||
}
|
||||
|
||||
func (s *DefaultBlockStore) BlockedByOffset(blocker, offset, perPage int) (uids []int, err error) {
|
||||
rows, e := s.blockedBy.Query(blocker, offset, perPage)
|
||||
if e != nil {
|
||||
return nil, e
|
||||
}
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
var uid int
|
||||
if e := rows.Scan(&uid); e != nil {
|
||||
return nil, e
|
||||
}
|
||||
uids = append(uids, uid)
|
||||
}
|
||||
return uids, rows.Err()
|
||||
}
|
||||
|
||||
func (s *DefaultBlockStore) BlockedByCount(blocker int) (count int) {
|
||||
e := s.blockedByCount.QueryRow(blocker).Scan(&count)
|
||||
if e != nil {
|
||||
LogError(e)
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
type FriendInvite struct {
|
||||
Requester int
|
||||
Target int
|
||||
}
|
||||
|
||||
type FriendStore interface {
|
||||
AddInvite(requester, target int) error
|
||||
Confirm(requester, target int) error
|
||||
GetOwSentInvites(uid int) ([]FriendInvite, error)
|
||||
GetOwnRecvInvites(uid int) ([]FriendInvite, error)
|
||||
}
|
||||
|
||||
type DefaultFriendStore struct {
|
||||
addInvite *sql.Stmt
|
||||
confirm *sql.Stmt
|
||||
getOwnSentInvites *sql.Stmt
|
||||
getOwnRecvInvites *sql.Stmt
|
||||
}
|
||||
|
||||
func NewDefaultFriendStore(acc *qgen.Accumulator) (*DefaultFriendStore, error) {
|
||||
ufi := "users_friends_invites"
|
||||
return &DefaultFriendStore{
|
||||
addInvite: acc.Insert(ufi).Columns("requester,target").Fields("?,?").Prepare(),
|
||||
confirm: acc.Insert("users_friends").Columns("uid,uid2").Fields("?,?").Prepare(),
|
||||
getOwnSentInvites: acc.Select(ufi).Cols("requester,target").Where("requester=?").Prepare(),
|
||||
getOwnRecvInvites: acc.Select(ufi).Cols("requester,target").Where("target=?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
func (s *DefaultFriendStore) AddInvite(requester, target int) error {
|
||||
_, e := s.addInvite.Exec(requester, target)
|
||||
return e
|
||||
}
|
||||
|
||||
func (s *DefaultFriendStore) Confirm(requester, target int) error {
|
||||
_, e := s.confirm.Exec(requester, target)
|
||||
return e
|
||||
}
|
||||
|
||||
func (s *DefaultFriendStore) GetOwnSentInvites(uid int) ([]FriendInvite, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (s *DefaultFriendStore) GetOwnRecvInvites(uid int) ([]FriendInvite, error) {
|
||||
return nil, nil
|
||||
}
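GetOwnSentInvites and GetOwnRecvInvites are stubs in this branch, even though the prepared statements for them are built in the constructor. If they were filled in against those statements, the row scanning would look roughly like this; treat it as a sketch of where the code is headed rather than code from either branch:

func (s *DefaultFriendStore) getInvitesSketch(stmt *sql.Stmt, uid int) (invites []FriendInvite, err error) {
	rows, err := stmt.Query(uid)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	for rows.Next() {
		var in FriendInvite
		if err := rows.Scan(&in.Requester, &in.Target); err != nil {
			return nil, err
		}
		invites = append(invites, in)
	}
	return invites, rows.Err()
}

GetOwnSentInvites would then delegate with s.getOwnSentInvites and GetOwnRecvInvites with s.getOwnRecvInvites.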
|
300 common/reply.go
@ -1,7 +1,7 @@
|
||||
/*
|
||||
*
|
||||
* Reply Resources File
|
||||
* Copyright Azareal 2016 - 2020
|
||||
* Copyright Azareal 2016 - 2018
|
||||
*
|
||||
*/
|
||||
package common
|
||||
@ -10,222 +10,152 @@ import (
|
||||
"database/sql"
|
||||
"errors"
|
||||
"html"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"../query_gen/lib"
|
||||
)
|
||||
|
||||
type ReplyUser struct {
|
||||
Reply
|
||||
|
||||
ContentHtml string
|
||||
UserLink string
|
||||
CreatedByName string
|
||||
Avatar string
|
||||
MicroAvatar string
|
||||
ClassName string
|
||||
Tag string
|
||||
URL string
|
||||
//URLPrefix string
|
||||
//URLName string
|
||||
Group int
|
||||
Level int
|
||||
ActionIcon string
|
||||
|
||||
Attachments []*MiniAttachment
|
||||
Deletable bool
|
||||
ID int
|
||||
ParentID int
|
||||
Content string
|
||||
ContentHtml string
|
||||
CreatedBy int
|
||||
UserLink string
|
||||
CreatedByName string
|
||||
Group int
|
||||
CreatedAt time.Time
|
||||
RelativeCreatedAt string
|
||||
LastEdit int
|
||||
LastEditBy int
|
||||
Avatar string
|
||||
MicroAvatar string
|
||||
ClassName string
|
||||
ContentLines int
|
||||
Tag string
|
||||
URL string
|
||||
URLPrefix string
|
||||
URLName string
|
||||
Level int
|
||||
IPAddress string
|
||||
Liked bool
|
||||
LikeCount int
|
||||
ActionType string
|
||||
ActionIcon string
|
||||
}
|
||||
|
||||
type Reply struct {
|
||||
ID int
|
||||
ParentID int
|
||||
Content string
|
||||
CreatedBy int
|
||||
//Group int
|
||||
CreatedAt time.Time
|
||||
LastEdit int
|
||||
LastEditBy int
|
||||
ContentLines int
|
||||
IP string
|
||||
Liked bool
|
||||
LikeCount int
|
||||
AttachCount uint16
|
||||
ActionType string
|
||||
ID int
|
||||
ParentID int
|
||||
Content string
|
||||
CreatedBy int
|
||||
Group int
|
||||
CreatedAt time.Time
|
||||
RelativeCreatedAt string
|
||||
LastEdit int
|
||||
LastEditBy int
|
||||
ContentLines int
|
||||
IPAddress string
|
||||
Liked bool
|
||||
LikeCount int
|
||||
}
|
||||
|
||||
var ErrAlreadyLiked = errors.New("You already liked this!")
|
||||
var replyStmts ReplyStmts
|
||||
|
||||
type ReplyStmts struct {
|
||||
isLiked *sql.Stmt
|
||||
createLike *sql.Stmt
|
||||
edit *sql.Stmt
|
||||
setPoll *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
addLikesToReply *sql.Stmt
|
||||
removeRepliesFromTopic *sql.Stmt
|
||||
deleteLikesForReply *sql.Stmt
|
||||
deleteActivity *sql.Stmt
|
||||
deleteActivitySubs *sql.Stmt
|
||||
|
||||
updateTopicReplies *sql.Stmt
|
||||
updateTopicReplies2 *sql.Stmt
|
||||
|
||||
getAidsOfReply *sql.Stmt
|
||||
isLiked *sql.Stmt
|
||||
createLike *sql.Stmt
|
||||
edit *sql.Stmt
|
||||
setPoll *sql.Stmt
|
||||
delete *sql.Stmt
|
||||
addLikesToReply *sql.Stmt
|
||||
removeRepliesFromTopic *sql.Stmt
|
||||
}
|
||||
|
||||
func init() {
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
re := "replies"
|
||||
replyStmts = ReplyStmts{
|
||||
isLiked: acc.Select("likes").Columns("targetItem").Where("sentBy=? and targetItem=? and targetType='replies'").Prepare(),
|
||||
createLike: acc.Insert("likes").Columns("weight,targetItem,targetType,sentBy,createdAt").Fields("?,?,?,?,UTC_TIMESTAMP()").Prepare(),
|
||||
edit: acc.Update(re).Set("content=?,parsed_content=?").Where("rid=? AND poll=0").Prepare(),
|
||||
setPoll: acc.Update(re).Set("poll=?").Where("rid=? AND poll=0").Prepare(),
|
||||
delete: acc.Delete(re).Where("rid=?").Prepare(),
|
||||
addLikesToReply: acc.Update(re).Set("likeCount=likeCount+?").Where("rid=?").Prepare(),
|
||||
removeRepliesFromTopic: acc.Update("topics").Set("postCount=postCount-?").Where("tid=?").Prepare(),
|
||||
deleteLikesForReply: acc.Delete("likes").Where("targetItem=? AND targetType='replies'").Prepare(),
|
||||
deleteActivity: acc.Delete("activity_stream").Where("elementID=? AND elementType='post'").Prepare(),
|
||||
deleteActivitySubs: acc.Delete("activity_subscriptions").Where("targetID=? AND targetType='post'").Prepare(),
|
||||
|
||||
// TODO: Optimise this to avoid firing an update if it's not the last reply in a topic. We will need to set lastReplyID properly in other places and in the patcher first so we can use it here.
|
||||
updateTopicReplies: acc.RawPrepare("UPDATE topics t INNER JOIN replies r ON t.tid=r.tid SET t.lastReplyBy=r.createdBy, t.lastReplyAt=r.createdAt, t.lastReplyID=r.rid WHERE t.tid=? ORDER BY r.rid DESC"),
|
||||
updateTopicReplies2: acc.Update("topics").Set("lastReplyAt=createdAt,lastReplyBy=createdBy,lastReplyID=0").Where("postCount=1 AND tid=?").Prepare(),
|
||||
|
||||
getAidsOfReply: acc.Select("attachments").Columns("attachID").Where("originID=? AND originTable='replies'").Prepare(),
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
DbInits.Add(func(acc *qgen.Accumulator) error {
|
||||
replyStmts = ReplyStmts{
|
||||
isLiked: acc.Select("likes").Columns("targetItem").Where("sentBy = ? and targetItem = ? and targetType = 'replies'").Prepare(),
|
||||
createLike: acc.Insert("likes").Columns("weight, targetItem, targetType, sentBy").Fields("?,?,?,?").Prepare(),
|
||||
edit: acc.Update("replies").Set("content = ?, parsed_content = ?").Where("rid = ? AND poll = 0").Prepare(),
|
||||
setPoll: acc.Update("replies").Set("poll = ?").Where("rid = ? AND poll = 0").Prepare(),
|
||||
delete: acc.Delete("replies").Where("rid = ?").Prepare(),
|
||||
addLikesToReply: acc.Update("replies").Set("likeCount = likeCount + ?").Where("rid = ?").Prepare(),
|
||||
removeRepliesFromTopic: acc.Update("topics").Set("postCount = postCount - ?").Where("tid = ?").Prepare(),
|
||||
}
|
||||
return acc.FirstError()
|
||||
})
|
||||
}
|
||||
|
||||
// TODO: Write tests for this
|
||||
// TODO: Wrap these queries in a transaction to make sure the state is consistent
|
||||
func (r *Reply) Like(uid int) (err error) {
|
||||
var rid int // unused, just here to avoid mutating reply.ID
|
||||
err = replyStmts.isLiked.QueryRow(uid, r.ID).Scan(&rid)
|
||||
if err != nil && err != ErrNoRows {
|
||||
return err
|
||||
} else if err != ErrNoRows {
|
||||
return ErrAlreadyLiked
|
||||
}
|
||||
func (reply *Reply) Like(uid int) (err error) {
|
||||
var rid int // unused, just here to avoid mutating reply.ID
|
||||
err = replyStmts.isLiked.QueryRow(uid, reply.ID).Scan(&rid)
|
||||
if err != nil && err != ErrNoRows {
|
||||
return err
|
||||
} else if err != ErrNoRows {
|
||||
return ErrAlreadyLiked
|
||||
}
|
||||
|
||||
score := 1
|
||||
_, err = replyStmts.createLike.Exec(score, r.ID, "replies", uid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = replyStmts.addLikesToReply.Exec(1, r.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = userStmts.incLiked.Exec(1, uid)
|
||||
_ = Rstore.GetCache().Remove(r.ID)
|
||||
return err
|
||||
}
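The TODO above Like asks for these statements to be wrapped in a transaction so a failure partway through can't leave the counters out of sync with the likes table. A rough shape of that, written against database/sql directly; the table and column names are read off the prepared statements above (the users.liked column is a guess from the incLiked statement name), so treat this purely as a sketch:

func likeReplyTxSketch(db *sql.DB, rid, uid int) error {
	tx, err := db.Begin()
	if err != nil {
		return err
	}
	defer tx.Rollback() // becomes a no-op after a successful Commit
	if _, err = tx.Exec("INSERT INTO likes(weight,targetItem,targetType,sentBy,createdAt) VALUES(1,?,'replies',?,UTC_TIMESTAMP())", rid, uid); err != nil {
		return err
	}
	if _, err = tx.Exec("UPDATE replies SET likeCount=likeCount+1 WHERE rid=?", rid); err != nil {
		return err
	}
	if _, err = tx.Exec("UPDATE users SET liked=liked+1 WHERE uid=?", uid); err != nil {
		return err
	}
	return tx.Commit()
}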
|
||||
|
||||
// TODO: Use a transaction
|
||||
func (r *Reply) Unlike(uid int) error {
|
||||
err := Likes.Delete(r.ID, "replies")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = replyStmts.addLikesToReply.Exec(-1, r.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = userStmts.decLiked.Exec(1, uid)
|
||||
_ = Rstore.GetCache().Remove(r.ID)
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO: Refresh topic list?
|
||||
func (r *Reply) Delete() error {
|
||||
creator, err := Users.Get(r.CreatedBy)
|
||||
if err == nil {
|
||||
err = creator.DecreasePostStats(WordCount(r.Content), false)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else if err != ErrNoRows {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = replyStmts.delete.Exec(r.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// TODO: Move this bit to *Topic
|
||||
_, err = replyStmts.removeRepliesFromTopic.Exec(1, r.ParentID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = replyStmts.updateTopicReplies.Exec(r.ParentID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = replyStmts.updateTopicReplies2.Exec(r.ParentID)
|
||||
tc := Topics.GetCache()
|
||||
if tc != nil {
|
||||
tc.Remove(r.ParentID)
|
||||
}
|
||||
_ = Rstore.GetCache().Remove(r.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = replyStmts.deleteLikesForReply.Exec(r.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = handleReplyAttachments(r.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = Activity.DeleteByParamsExtra("reply", r.ParentID, "topic", strconv.Itoa(r.ID))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = replyStmts.deleteActivitySubs.Exec(r.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = replyStmts.deleteActivity.Exec(r.ID)
|
||||
return err
|
||||
}
|
||||
|
||||
func (r *Reply) SetPost(content string) error {
|
||||
topic, err := r.Topic()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
content = PreparseMessage(html.UnescapeString(content))
|
||||
parsedContent := ParseMessage(content, topic.ParentID, "forums", nil, nil)
|
||||
_, err = replyStmts.edit.Exec(content, parsedContent, r.ID) // TODO: Sniff if this changed anything to see if we hit an existing poll
|
||||
_ = Rstore.GetCache().Remove(r.ID)
|
||||
return err
|
||||
score := 1
|
||||
_, err = replyStmts.createLike.Exec(score, reply.ID, "replies", uid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = replyStmts.addLikesToReply.Exec(1, reply.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = userStmts.incrementLiked.Exec(1, uid)
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO: Write tests for this
|
||||
func (r *Reply) SetPoll(pollID int) error {
|
||||
_, err := replyStmts.setPoll.Exec(pollID, r.ID) // TODO: Sniff if this changed anything to see if we hit a poll
|
||||
_ = Rstore.GetCache().Remove(r.ID)
|
||||
return err
|
||||
func (reply *Reply) Delete() error {
|
||||
_, err := replyStmts.delete.Exec(reply.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// TODO: Move this bit to *Topic
|
||||
_, err = replyStmts.removeRepliesFromTopic.Exec(1, reply.ParentID)
|
||||
tcache := Topics.GetCache()
|
||||
if tcache != nil {
|
||||
tcache.Remove(reply.ParentID)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (r *Reply) Topic() (*Topic, error) {
|
||||
return Topics.Get(r.ParentID)
|
||||
func (reply *Reply) SetPost(content string) error {
|
||||
topic, err := reply.Topic()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
content = PreparseMessage(html.UnescapeString(content))
|
||||
parsedContent := ParseMessage(content, topic.ParentID, "forums")
|
||||
_, err = replyStmts.edit.Exec(content, parsedContent, reply.ID) // TODO: Sniff if this changed anything to see if we hit an existing poll
|
||||
return err
|
||||
}
|
||||
|
||||
func (r *Reply) GetID() int {
|
||||
return r.ID
|
||||
func (reply *Reply) SetPoll(pollID int) error {
|
||||
_, err := replyStmts.setPoll.Exec(pollID, reply.ID) // TODO: Sniff if this changed anything to see if we hit a poll
|
||||
return err
|
||||
}
|
||||
|
||||
func (r *Reply) GetTable() string {
|
||||
return "replies"
|
||||
func (reply *Reply) Topic() (*Topic, error) {
|
||||
return Topics.Get(reply.ParentID)
|
||||
}
|
||||
|
||||
func (reply *Reply) GetID() int {
|
||||
return reply.ID
|
||||
}
|
||||
|
||||
func (reply *Reply) GetTable() string {
|
||||
return "replies"
|
||||
}
|
||||
|
||||
// Copy gives you a non-pointer concurrency safe copy of the reply
|
||||
func (r *Reply) Copy() Reply {
|
||||
return *r
|
||||
func (reply *Reply) Copy() Reply {
|
||||
return *reply
|
||||
}
|
||||
|
@ -1,163 +0,0 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
//"log"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
)
|
||||
|
||||
// ReplyCache is an interface which spits out replies from a fast cache rather than the database, whether from memory or from an application like Redis. Replies may not be present in the cache but may be in the database
|
||||
type ReplyCache interface {
|
||||
Get(id int) (*Reply, error)
|
||||
GetUnsafe(id int) (*Reply, error)
|
||||
BulkGet(ids []int) (list []*Reply)
|
||||
Set(item *Reply) error
|
||||
Add(item *Reply) error
|
||||
AddUnsafe(item *Reply) error
|
||||
Remove(id int) error
|
||||
RemoveUnsafe(id int) error
|
||||
Flush()
|
||||
Length() int
|
||||
SetCapacity(cap int)
|
||||
GetCapacity() int
|
||||
}
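NewNullReplyCache is referenced further down in NewSQLReplyStore but its body isn't part of this diff. A no-op type that satisfies the interface above would look something like the following; the name is made up here to avoid implying this is the project's actual implementation:

type nullReplyCacheSketch struct{}

func (c *nullReplyCacheSketch) Get(id int) (*Reply, error)       { return nil, ErrNoRows }
func (c *nullReplyCacheSketch) GetUnsafe(id int) (*Reply, error) { return nil, ErrNoRows }
func (c *nullReplyCacheSketch) BulkGet(ids []int) []*Reply       { return make([]*Reply, len(ids)) }
func (c *nullReplyCacheSketch) Set(item *Reply) error            { return nil }
func (c *nullReplyCacheSketch) Add(item *Reply) error            { return nil }
func (c *nullReplyCacheSketch) AddUnsafe(item *Reply) error      { return nil }
func (c *nullReplyCacheSketch) Remove(id int) error              { return nil }
func (c *nullReplyCacheSketch) RemoveUnsafe(id int) error        { return nil }
func (c *nullReplyCacheSketch) Flush()                           {}
func (c *nullReplyCacheSketch) Length() int                      { return 0 }
func (c *nullReplyCacheSketch) SetCapacity(cap int)              {}
func (c *nullReplyCacheSketch) GetCapacity() int                 { return 0 }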
|
||||
|
||||
// MemoryReplyCache stores and pulls replies out of the current process' memory
|
||||
type MemoryReplyCache struct {
|
||||
items map[int]*Reply
|
||||
length int64 // sync/atomic only lets us operate on int32s and int64s
|
||||
capacity int
|
||||
|
||||
sync.RWMutex
|
||||
}
|
||||
|
||||
// NewMemoryReplyCache gives you a new instance of MemoryReplyCache
|
||||
func NewMemoryReplyCache(cap int) *MemoryReplyCache {
|
||||
return &MemoryReplyCache{
|
||||
items: make(map[int]*Reply),
|
||||
capacity: cap,
|
||||
}
|
||||
}
|
||||
|
||||
// Get fetches a reply by ID. Returns ErrNoRows if not present.
|
||||
func (s *MemoryReplyCache) Get(id int) (*Reply, error) {
|
||||
s.RLock()
|
||||
item, ok := s.items[id]
|
||||
s.RUnlock()
|
||||
if ok {
|
||||
return item, nil
|
||||
}
|
||||
return item, ErrNoRows
|
||||
}
|
||||
|
||||
// GetUnsafe fetches a reply by ID. Returns ErrNoRows if not present. THIS METHOD IS NOT THREAD-SAFE.
|
||||
func (s *MemoryReplyCache) GetUnsafe(id int) (*Reply, error) {
|
||||
item, ok := s.items[id]
|
||||
if ok {
|
||||
return item, nil
|
||||
}
|
||||
return item, ErrNoRows
|
||||
}
|
||||
|
||||
// BulkGet fetches multiple replies by their IDs. Indices without replies will be set to nil, so make sure you check for those; we might want to change this behaviour to make it less confusing.
|
||||
func (s *MemoryReplyCache) BulkGet(ids []int) (list []*Reply) {
|
||||
list = make([]*Reply, len(ids))
|
||||
s.RLock()
|
||||
for i, id := range ids {
|
||||
list[i] = s.items[id]
|
||||
}
|
||||
s.RUnlock()
|
||||
return list
|
||||
}
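As the comment above BulkGet warns, any index the cache can't serve is left nil, so callers are expected to backfill those from the store. A sketch of that pattern, using the Rstore global declared in the reply store file:

func bulkGetWithFallbackSketch(cache ReplyCache, ids []int) ([]*Reply, error) {
	list := cache.BulkGet(ids)
	for i, r := range list {
		if r != nil {
			continue // cache hit
		}
		r, err := Rstore.Get(ids[i]) // miss: go through the store instead
		if err != nil {
			return nil, err
		}
		list[i] = r
	}
	return list, nil
}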
|
||||
|
||||
// Set overwrites the value of a reply in the cache, whether it's present or not. May return a capacity overflow error.
|
||||
func (s *MemoryReplyCache) Set(item *Reply) error {
|
||||
s.Lock()
|
||||
_, ok := s.items[item.ID]
|
||||
if ok {
|
||||
s.items[item.ID] = item
|
||||
} else if int(s.length) >= s.capacity {
|
||||
s.Unlock()
|
||||
return ErrStoreCapacityOverflow
|
||||
} else {
|
||||
s.items[item.ID] = item
|
||||
atomic.AddInt64(&s.length, 1)
|
||||
}
|
||||
s.Unlock()
|
||||
return nil
|
||||
}
|
||||
|
||||
// Add adds a reply to the cache, similar to Set, but it's only intended for new items. This method might be deprecated in the near future; use Set instead. May return a capacity overflow error.
|
||||
// ? Is this redundant if we have Set? Are the efficiency wins worth this? Is this even used?
|
||||
func (s *MemoryReplyCache) Add(item *Reply) error {
|
||||
//log.Print("MemoryReplyCache.Add")
|
||||
s.Lock()
|
||||
if int(s.length) >= s.capacity {
|
||||
s.Unlock()
|
||||
return ErrStoreCapacityOverflow
|
||||
}
|
||||
s.items[item.ID] = item
|
||||
s.Unlock()
|
||||
atomic.AddInt64(&s.length, 1)
|
||||
return nil
|
||||
}
|
||||
|
||||
// AddUnsafe is the unsafe version of Add. May return a capacity overflow error. THIS METHOD IS NOT THREAD-SAFE.
|
||||
func (s *MemoryReplyCache) AddUnsafe(item *Reply) error {
|
||||
if int(s.length) >= s.capacity {
|
||||
return ErrStoreCapacityOverflow
|
||||
}
|
||||
s.items[item.ID] = item
|
||||
s.length = int64(len(s.items))
|
||||
return nil
|
||||
}
|
||||
|
||||
// Remove removes a reply from the cache by ID, if they exist. Returns ErrNoRows if no items exist.
|
||||
func (s *MemoryReplyCache) Remove(id int) error {
|
||||
s.Lock()
|
||||
_, ok := s.items[id]
|
||||
if !ok {
|
||||
s.Unlock()
|
||||
return ErrNoRows
|
||||
}
|
||||
delete(s.items, id)
|
||||
s.Unlock()
|
||||
atomic.AddInt64(&s.length, -1)
|
||||
return nil
|
||||
}
|
||||
|
||||
// RemoveUnsafe is the unsafe version of Remove. THIS METHOD IS NOT THREAD-SAFE.
|
||||
func (s *MemoryReplyCache) RemoveUnsafe(id int) error {
|
||||
_, ok := s.items[id]
|
||||
if !ok {
|
||||
return ErrNoRows
|
||||
}
|
||||
delete(s.items, id)
|
||||
atomic.AddInt64(&s.length, -1)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Flush removes all the replies from the cache, useful for tests.
|
||||
func (s *MemoryReplyCache) Flush() {
|
||||
s.Lock()
|
||||
s.items = make(map[int]*Reply)
|
||||
s.length = 0
|
||||
s.Unlock()
|
||||
}
|
||||
|
||||
// ! Is this concurrent?
|
||||
// Length returns the number of replies in the memory cache
|
||||
func (s *MemoryReplyCache) Length() int {
|
||||
return int(s.length)
|
||||
}
|
||||
|
||||
// SetCapacity sets the maximum number of replies which this cache can hold
|
||||
func (s *MemoryReplyCache) SetCapacity(cap int) {
|
||||
// Ints are moved in a single instruction, so this should be thread-safe
|
||||
s.capacity = cap
|
||||
}
|
||||
|
||||
// GetCapacity returns the maximum number of replies this cache can hold
|
||||
func (s *MemoryReplyCache) GetCapacity() int {
|
||||
return s.capacity
|
||||
}
|
@ -1,147 +1,44 @@
|
||||
package common
|
||||
|
||||
//import "log"
|
||||
import (
|
||||
"database/sql"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
)
|
||||
import "database/sql"
|
||||
import "../query_gen/lib"
|
||||
|
||||
var Rstore ReplyStore
|
||||
|
||||
type ReplyStore interface {
|
||||
Get(id int) (*Reply, error)
|
||||
Each(f func(*Reply) error) error
|
||||
Exists(id int) bool
|
||||
ClearIPs() error
|
||||
Create(t *Topic, content, ip string, uid int) (id int, err error)
|
||||
Count() (count int)
|
||||
CountUser(uid int) (count int)
|
||||
CountMegaUser(uid int) (count int)
|
||||
CountBigUser(uid int) (count int)
|
||||
|
||||
SetCache(cache ReplyCache)
|
||||
GetCache() ReplyCache
|
||||
Get(id int) (*Reply, error)
|
||||
Create(topic *Topic, content string, ipaddress string, uid int) (id int, err error)
|
||||
}
|
||||
|
||||
type SQLReplyStore struct {
|
||||
cache ReplyCache
|
||||
|
||||
get *sql.Stmt
|
||||
getAll *sql.Stmt
|
||||
exists *sql.Stmt
|
||||
create *sql.Stmt
|
||||
count *sql.Stmt
|
||||
countUser *sql.Stmt
|
||||
countWordUser *sql.Stmt
|
||||
|
||||
clearIPs *sql.Stmt
|
||||
get *sql.Stmt
|
||||
create *sql.Stmt
|
||||
}
|
||||
|
||||
func NewSQLReplyStore(acc *qgen.Accumulator, cache ReplyCache) (*SQLReplyStore, error) {
|
||||
if cache == nil {
|
||||
cache = NewNullReplyCache()
|
||||
}
|
||||
re := "replies"
|
||||
return &SQLReplyStore{
|
||||
cache: cache,
|
||||
get: acc.Select(re).Columns("tid,content,createdBy,createdAt,lastEdit,lastEditBy,ip,likeCount,attachCount,actionType").Where("rid=?").Prepare(),
|
||||
getAll: acc.Select(re).Columns("rid,tid,content,createdBy,createdAt,lastEdit,lastEditBy,ip,likeCount,attachCount,actionType").Prepare(),
|
||||
exists: acc.Exists(re, "rid").Prepare(),
|
||||
create: acc.Insert(re).Columns("tid,content,parsed_content,createdAt,lastUpdated,ip,words,createdBy").Fields("?,?,?,UTC_TIMESTAMP(),UTC_TIMESTAMP(),?,?,?").Prepare(),
|
||||
count: acc.Count(re).Prepare(),
|
||||
countUser: acc.Count(re).Where("createdBy=?").Prepare(),
|
||||
countWordUser: acc.Count(re).Where("createdBy=? AND words>=?").Prepare(),
|
||||
|
||||
clearIPs: acc.Update(re).Set("ip=''").Where("ip!=''").Stmt(),
|
||||
}, acc.FirstError()
|
||||
func NewSQLReplyStore(acc *qgen.Accumulator) (*SQLReplyStore, error) {
|
||||
return &SQLReplyStore{
|
||||
get: acc.Select("replies").Columns("tid, content, createdBy, createdAt, lastEdit, lastEditBy, ipaddress, likeCount").Where("rid = ?").Prepare(),
|
||||
create: acc.Insert("replies").Columns("tid, content, parsed_content, createdAt, lastUpdated, ipaddress, words, createdBy").Fields("?,?,?,UTC_TIMESTAMP(),UTC_TIMESTAMP(),?,?,?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
func (s *SQLReplyStore) Get(id int) (*Reply, error) {
|
||||
r, err := s.cache.Get(id)
|
||||
if err == nil {
|
||||
return r, nil
|
||||
}
|
||||
|
||||
r = &Reply{ID: id}
|
||||
err = s.get.QueryRow(id).Scan(&r.ParentID, &r.Content, &r.CreatedBy, &r.CreatedAt, &r.LastEdit, &r.LastEditBy, &r.IP, &r.LikeCount, &r.AttachCount, &r.ActionType)
|
||||
if err == nil {
|
||||
_ = s.cache.Set(r)
|
||||
}
|
||||
return r, err
|
||||
}
|
||||
|
||||
/*func (s *SQLReplyStore) eachr(f func(*sql.Rows) error) error {
|
||||
return eachall(s.getAll, f)
|
||||
}*/
|
||||
|
||||
func (s *SQLReplyStore) Each(f func(*Reply) error) error {
|
||||
rows, err := s.getAll.Query()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer rows.Close()
|
||||
for rows.Next() {
|
||||
r := new(Reply)
|
||||
if err := rows.Scan(&r.ID, &r.ParentID, &r.Content, &r.CreatedBy, &r.CreatedAt, &r.LastEdit, &r.LastEditBy, &r.IP, &r.LikeCount, &r.AttachCount, &r.ActionType); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := f(r); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return rows.Err()
|
||||
}
|
||||
|
||||
func (s *SQLReplyStore) Exists(id int) bool {
|
||||
err := s.exists.QueryRow(id).Scan(&id)
|
||||
if err != nil && err != ErrNoRows {
|
||||
LogError(err)
|
||||
}
|
||||
return err != ErrNoRows
|
||||
}
|
||||
|
||||
func (s *SQLReplyStore) ClearIPs() error {
|
||||
_, e := s.clearIPs.Exec()
|
||||
return e
|
||||
func (store *SQLReplyStore) Get(id int) (*Reply, error) {
|
||||
reply := Reply{ID: id}
|
||||
err := store.get.QueryRow(id).Scan(&reply.ParentID, &reply.Content, &reply.CreatedBy, &reply.CreatedAt, &reply.LastEdit, &reply.LastEditBy, &reply.IPAddress, &reply.LikeCount)
|
||||
return &reply, err
|
||||
}
|
||||
|
||||
// TODO: Write a test for this
|
||||
func (s *SQLReplyStore) Create(t *Topic, content, ip string, uid int) (id int, err error) {
|
||||
if Config.DisablePostIP {
|
||||
ip = ""
|
||||
}
|
||||
res, err := s.create.Exec(t.ID, content, ParseMessage(content, t.ParentID, "forums", nil, nil), ip, WordCount(content), uid)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
lastID, err := res.LastInsertId()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
id = int(lastID)
|
||||
return id, t.AddReply(id, uid)
|
||||
}
|
||||
func (store *SQLReplyStore) Create(topic *Topic, content string, ipaddress string, uid int) (id int, err error) {
|
||||
wcount := WordCount(content)
|
||||
res, err := store.create.Exec(topic.ID, content, ParseMessage(content, topic.ParentID, "forums"), ipaddress, wcount, uid)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
// TODO: Write a test for this
|
||||
// Count returns the total number of topic replies on these forums
|
||||
func (s *SQLReplyStore) Count() (count int) {
|
||||
return Countf(s.count)
|
||||
}
|
||||
func (s *SQLReplyStore) CountUser(uid int) (count int) {
|
||||
return Countf(s.countUser, uid)
|
||||
}
|
||||
func (s *SQLReplyStore) CountMegaUser(uid int) (count int) {
|
||||
return Countf(s.countWordUser, uid, SettingBox.Load().(SettingMap)["megapost_min_words"].(int))
|
||||
}
|
||||
func (s *SQLReplyStore) CountBigUser(uid int) (count int) {
|
||||
return Countf(s.countWordUser, uid, SettingBox.Load().(SettingMap)["bigpost_min_words"].(int))
|
||||
}
|
||||
|
||||
func (s *SQLReplyStore) SetCache(cache ReplyCache) {
|
||||
s.cache = cache
|
||||
}
|
||||
|
||||
func (s *SQLReplyStore) GetCache() ReplyCache {
|
||||
return s.cache
|
||||
lastID, err := res.LastInsertId()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return int(lastID), topic.AddReply(uid)
|
||||
}
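Tying the two halves of this file together, a handler that posts a reply goes through Create and then reads the row back through Get; the sketch below is illustrative glue, not code from either branch:

func postReplySketch(t *Topic, content, ip string, uid int) (*Reply, error) {
	rid, err := Rstore.Create(t, content, ip, uid) // parses, inserts and bumps the topic's reply count
	if err != nil {
		return nil, err
	}
	return Rstore.Get(rid)
}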
|
||||
|
@ -5,7 +5,7 @@ import (
|
||||
"errors"
|
||||
"strconv"
|
||||
|
||||
qgen "git.tuxpa.in/a/gosora/query_gen"
|
||||
"../query_gen/lib"
|
||||
)
|
||||
|
||||
// TODO: Make the default report forum ID configurable
|
||||
@ -17,46 +17,41 @@ var ErrAlreadyReported = errors.New("This item has already been reported")
|
||||
|
||||
// The report system mostly wraps around the topic system for simplicity
|
||||
type ReportStore interface {
|
||||
Create(title, content string, u *User, itemType string, itemID int) (int, error)
|
||||
Create(title string, content string, user *User, itemType string, itemID int) (int, error)
|
||||
}
|
||||
|
||||
type DefaultReportStore struct {
|
||||
create *sql.Stmt
|
||||
exists *sql.Stmt
|
||||
create *sql.Stmt
|
||||
exists *sql.Stmt
|
||||
}
|
||||
|
||||
func NewDefaultReportStore(acc *qgen.Accumulator) (*DefaultReportStore, error) {
|
||||
t := "topics"
|
||||
return &DefaultReportStore{
|
||||
create: acc.Insert(t).Columns("title, content, parsed_content, ip, createdAt, lastReplyAt, createdBy, lastReplyBy, data, parentID, css_class").Fields("?,?,?,?,UTC_TIMESTAMP(),UTC_TIMESTAMP(),?,?,?,?,'report'").Prepare(),
|
||||
exists: acc.Count(t).Where("data=? AND data!='' AND parentID=?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
return &DefaultReportStore{
|
||||
create: acc.Insert("topics").Columns("title, content, parsed_content, ipaddress, createdAt, lastReplyAt, createdBy, lastReplyBy, data, parentID, css_class").Fields("?,?,?,?,UTC_TIMESTAMP(),UTC_TIMESTAMP(),?,?,?,?,'report'").Prepare(),
|
||||
exists: acc.Count("topics").Where("data = ? AND data != '' AND parentID = ?").Prepare(),
|
||||
}, acc.FirstError()
|
||||
}
|
||||
|
||||
// ! There's a data race in this. If two users report one item at the exact same time, then both reports will go through
|
||||
func (s *DefaultReportStore) Create(title, content string, u *User, itemType string, itemID int) (tid int, err error) {
|
||||
var count int
|
||||
err = s.exists.QueryRow(itemType+"_"+strconv.Itoa(itemID), ReportForumID).Scan(&count)
|
||||
if err != nil && err != sql.ErrNoRows {
|
||||
return 0, err
|
||||
}
|
||||
if count != 0 {
|
||||
return 0, ErrAlreadyReported
|
||||
}
|
||||
func (store *DefaultReportStore) Create(title string, content string, user *User, itemType string, itemID int) (int, error) {
|
||||
var count int
|
||||
err := store.exists.QueryRow(itemType + "_" + strconv.Itoa(itemID)).Scan(&count)
|
||||
if err != nil && err != sql.ErrNoRows {
|
||||
return 0, err
|
||||
}
|
||||
if count != 0 {
|
||||
return 0, ErrAlreadyReported
|
||||
}
|
||||
|
||||
ip := u.GetIP()
|
||||
if Config.DisablePostIP {
|
||||
ip = ""
|
||||
}
|
||||
res, err := s.create.Exec(title, content, ParseMessage(content, 0, "", nil, nil), ip, u.ID, u.ID, itemType+"_"+strconv.Itoa(itemID), ReportForumID)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
lastID, err := res.LastInsertId()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
tid = int(lastID)
|
||||
res, err := store.create.Exec(title, content, ParseMessage(content, 0, ""), user.LastIP, user.ID, user.ID, itemType+"_"+strconv.Itoa(itemID), ReportForumID)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return tid, Forums.AddTopic(tid, u.ID, ReportForumID)
|
||||
lastID, err := res.LastInsertId()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return int(lastID), Forums.AddTopic(int(lastID), user.ID, ReportForumID)
|
||||
}
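The '!' note above Create flags a check-then-insert race: two users reporting the same item at the same moment can both pass the exists query. One way to close it is to add a unique index over (data, parentID) and let the database reject the duplicate; the sketch below assumes such an index and the go-sql-driver/mysql driver (error 1062 is its duplicate-key code), neither of which this patch actually adds:

func createReportOnceSketch(create *sql.Stmt, args ...interface{}) (int64, error) {
	res, err := create.Exec(args...)
	if err != nil {
		if myErr, ok := err.(*mysql.MySQLError); ok && myErr.Number == 1062 {
			return 0, ErrAlreadyReported // duplicate key: someone beat us to the report
		}
		return 0, err
	}
	return res.LastInsertId()
}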
|
||||
|
@ -1,19 +1,11 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"crypto/subtle"
|
||||
"html"
|
||||
"io"
|
||||
"net"
|
||||
"net/http"
|
||||
"os"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"git.tuxpa.in/a/gosora/common/phrases"
|
||||
"git.tuxpa.in/a/gosora/uutils"
|
||||
)
|
||||
|
||||
// nolint
|
||||
@ -23,548 +15,371 @@ var PreRoute func(http.ResponseWriter, *http.Request) (User, bool) = preRoute
|
||||
// nolint We need these types so people can tell what they are without scrolling to the bottom of the file
|
||||
var PanelUserCheck func(http.ResponseWriter, *http.Request, *User) (*Header, PanelStats, RouteError) = panelUserCheck
|
||||
var SimplePanelUserCheck func(http.ResponseWriter, *http.Request, *User) (*HeaderLite, RouteError) = simplePanelUserCheck
|
||||
var SimpleForumUserCheck func(w http.ResponseWriter, r *http.Request, u *User, fid int) (headerLite *HeaderLite, err RouteError) = simpleForumUserCheck
|
||||
var ForumUserCheck func(h *Header, w http.ResponseWriter, r *http.Request, u *User, fid int) (err RouteError) = forumUserCheck
|
||||
var SimpleUserCheck func(w http.ResponseWriter, r *http.Request, u *User) (headerLite *HeaderLite, err RouteError) = simpleUserCheck
|
||||
var UserCheck func(w http.ResponseWriter, r *http.Request, u *User) (h *Header, err RouteError) = userCheck
|
||||
var UserCheckNano func(w http.ResponseWriter, r *http.Request, u *User, nano int64) (h *Header, err RouteError) = userCheck2
|
||||
var SimpleForumUserCheck func(w http.ResponseWriter, r *http.Request, user *User, fid int) (headerLite *HeaderLite, err RouteError) = simpleForumUserCheck
|
||||
var ForumUserCheck func(w http.ResponseWriter, r *http.Request, user *User, fid int) (header *Header, err RouteError) = forumUserCheck
|
||||
var SimpleUserCheck func(w http.ResponseWriter, r *http.Request, user *User) (headerLite *HeaderLite, err RouteError) = simpleUserCheck
|
||||
var UserCheck func(w http.ResponseWriter, r *http.Request, user *User) (header *Header, err RouteError) = userCheck
|
||||
|
||||
func simpleForumUserCheck(w http.ResponseWriter, r *http.Request, u *User, fid int) (h *HeaderLite, rerr RouteError) {
|
||||
h, rerr = SimpleUserCheck(w, r, u)
|
||||
if rerr != nil {
|
||||
return h, rerr
|
||||
}
|
||||
if !Forums.Exists(fid) {
|
||||
return nil, PreError("The target forum doesn't exist.", w, r)
|
||||
}
|
||||
func simpleForumUserCheck(w http.ResponseWriter, r *http.Request, user *User, fid int) (headerLite *HeaderLite, rerr RouteError) {
|
||||
if !Forums.Exists(fid) {
|
||||
return nil, PreError("The target forum doesn't exist.", w, r)
|
||||
}
|
||||
|
||||
// Is there a better way of doing the skip AND the success flag on this hook like multiple returns?
|
||||
/*skip, rerr := h.Hooks.VhookSkippable("simple_forum_check_pre_perms", w, r, u, &fid, h)
|
||||
if skip || rerr != nil {
|
||||
return h, rerr
|
||||
}*/
|
||||
skip, rerr := H_simple_forum_check_pre_perms_hook(h.Hooks, w, r, u, &fid, h)
|
||||
if skip || rerr != nil {
|
||||
return h, rerr
|
||||
}
|
||||
// Is there a better way of doing the skip AND the success flag on this hook like multiple returns?
|
||||
if VhookSkippable["simple_forum_check_pre_perms"] != nil {
|
||||
var skip bool
|
||||
skip, rerr = RunVhookSkippable("simple_forum_check_pre_perms", w, r, user, &fid, &headerLite)
|
||||
if skip || rerr != nil {
|
||||
return headerLite, rerr
|
||||
}
|
||||
}
|
||||
|
||||
fp, err := FPStore.Get(fid, u.Group)
|
||||
if err == ErrNoRows {
|
||||
fp = BlankForumPerms()
|
||||
} else if err != nil {
|
||||
return h, InternalError(err, w, r)
|
||||
}
|
||||
cascadeForumPerms(fp, u)
|
||||
return h, nil
|
||||
fperms, err := FPStore.Get(fid, user.Group)
|
||||
if err == ErrNoRows {
|
||||
fperms = BlankForumPerms()
|
||||
} else if err != nil {
|
||||
return headerLite, InternalError(err, w, r)
|
||||
}
|
||||
cascadeForumPerms(fperms, user)
|
||||
return headerLite, nil
|
||||
}
|
||||
|
||||
func forumUserCheck(h *Header, w http.ResponseWriter, r *http.Request, u *User, fid int) (rerr RouteError) {
|
||||
if !Forums.Exists(fid) {
|
||||
return NotFound(w, r, h)
|
||||
}
|
||||
func forumUserCheck(w http.ResponseWriter, r *http.Request, user *User, fid int) (header *Header, rerr RouteError) {
|
||||
header, rerr = UserCheck(w, r, user)
|
||||
if rerr != nil {
|
||||
return header, rerr
|
||||
}
|
||||
if !Forums.Exists(fid) {
|
||||
return header, NotFound(w, r, header)
|
||||
}
|
||||
|
||||
/*skip, rerr := h.Hooks.VhookSkippable("forum_check_pre_perms", w, r, u, &fid, h)
|
||||
if skip || rerr != nil {
|
||||
return rerr
|
||||
}*/
|
||||
/*skip, rerr := VhookSkippableTest(h.Hooks, "forum_check_pre_perms", w, r, u, &fid, h)
|
||||
if skip || rerr != nil {
|
||||
return rerr
|
||||
}*/
|
||||
skip, rerr := H_forum_check_pre_perms_hook(h.Hooks, w, r, u, &fid, h)
|
||||
if skip || rerr != nil {
|
||||
return rerr
|
||||
}
|
||||
if VhookSkippable["forum_check_pre_perms"] != nil {
|
||||
var skip bool
|
||||
skip, rerr = RunVhookSkippable("forum_check_pre_perms", w, r, user, &fid, &header)
|
||||
if skip || rerr != nil {
|
||||
return header, rerr
|
||||
}
|
||||
}
|
||||
|
||||
fp, err := FPStore.Get(fid, u.Group)
|
||||
if err == ErrNoRows {
|
||||
fp = BlankForumPerms()
|
||||
} else if err != nil {
|
||||
return InternalError(err, w, r)
|
||||
}
|
||||
cascadeForumPerms(fp, u)
|
||||
h.CurrentUser = u // TODO: Use a pointer instead for CurrentUser, so we don't have to do this
|
||||
return rerr
|
||||
fperms, err := FPStore.Get(fid, user.Group)
|
||||
if err == ErrNoRows {
|
||||
fperms = BlankForumPerms()
|
||||
} else if err != nil {
|
||||
return header, InternalError(err, w, r)
|
||||
}
|
||||
cascadeForumPerms(fperms, user)
|
||||
header.CurrentUser = *user // TODO: Use a pointer instead for CurrentUser, so we don't have to do this
|
||||
return header, rerr
|
||||
}
|
||||
|
||||
// TODO: Put this on the user instance? Do we really want forum specific logic in there? Maybe, a method which spits a new pointer with the same contents as user?
|
||||
func cascadeForumPerms(fp *ForumPerms, u *User) {
|
||||
if fp.Overrides && !u.IsSuperAdmin {
|
||||
u.Perms.ViewTopic = fp.ViewTopic
|
||||
u.Perms.LikeItem = fp.LikeItem
|
||||
u.Perms.CreateTopic = fp.CreateTopic
|
||||
u.Perms.EditTopic = fp.EditTopic
|
||||
u.Perms.DeleteTopic = fp.DeleteTopic
|
||||
u.Perms.CreateReply = fp.CreateReply
|
||||
u.Perms.EditReply = fp.EditReply
|
||||
u.Perms.DeleteReply = fp.DeleteReply
|
||||
u.Perms.PinTopic = fp.PinTopic
|
||||
u.Perms.CloseTopic = fp.CloseTopic
|
||||
u.Perms.MoveTopic = fp.MoveTopic
|
||||
func cascadeForumPerms(fperms *ForumPerms, user *User) {
|
||||
if fperms.Overrides && !user.IsSuperAdmin {
|
||||
user.Perms.ViewTopic = fperms.ViewTopic
|
||||
user.Perms.LikeItem = fperms.LikeItem
|
||||
user.Perms.CreateTopic = fperms.CreateTopic
|
||||
user.Perms.EditTopic = fperms.EditTopic
|
||||
user.Perms.DeleteTopic = fperms.DeleteTopic
|
||||
user.Perms.CreateReply = fperms.CreateReply
|
||||
user.Perms.EditReply = fperms.EditReply
|
||||
user.Perms.DeleteReply = fperms.DeleteReply
|
||||
user.Perms.PinTopic = fperms.PinTopic
|
||||
user.Perms.CloseTopic = fperms.CloseTopic
|
||||
user.Perms.MoveTopic = fperms.MoveTopic
|
||||
|
||||
if len(fp.ExtData) != 0 {
|
||||
for name, perm := range fp.ExtData {
|
||||
u.PluginPerms[name] = perm
|
||||
}
|
||||
}
|
||||
}
|
||||
if len(fperms.ExtData) != 0 {
|
||||
for name, perm := range fperms.ExtData {
|
||||
user.PluginPerms[name] = perm
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Even if they have the right permissions, the control panel is only open to supermods+. There are many areas without subpermissions which assume that the current user is at least a supermod, and admins are extremely unlikely to give these permissions to someone who isn't a supermod to begin with
|
||||
// TODO: Do a panel specific theme?
|
||||
func panelUserCheck(w http.ResponseWriter, r *http.Request, u *User) (h *Header, stats PanelStats, rerr RouteError) {
|
||||
theme := GetThemeByReq(r)
|
||||
h = &Header{
|
||||
Site: Site,
|
||||
Settings: SettingBox.Load().(SettingMap),
|
||||
//Themes: Themes,
|
||||
ThemesSlice: ThemesSlice,
|
||||
Theme: theme,
|
||||
CurrentUser: u,
|
||||
Hooks: GetHookTable(),
|
||||
Zone: "panel",
|
||||
Writer: w,
|
||||
IsoCode: phrases.GetLangPack().IsoCode,
|
||||
//StartedAt: time.Now(),
|
||||
StartedAt: uutils.Nanotime(),
|
||||
}
|
||||
// TODO: We should probably initialise header.ExtData
|
||||
// ? - Should we only show this in debug mode? It might be useful for detecting issues in production, if we show it there as-well
|
||||
//if user.IsAdmin {
|
||||
//h.StartedAt = time.Now()
|
||||
//}
|
||||
func panelUserCheck(w http.ResponseWriter, r *http.Request, user *User) (header *Header, stats PanelStats, rerr RouteError) {
|
||||
var theme = &Theme{Name: ""}
|
||||
|
||||
h.AddSheet(theme.Name + "/main.css")
|
||||
h.AddSheet(theme.Name + "/panel.css")
|
||||
if len(theme.Resources) > 0 {
|
||||
rlist := theme.Resources
|
||||
for _, res := range rlist {
|
||||
if res.LocID == LocGlobal || res.LocID == LocPanel {
|
||||
if res.Type == ResTypeSheet {
|
||||
h.AddSheet(res.Name)
|
||||
} else if res.Type == ResTypeScript {
|
||||
if res.Async {
|
||||
h.AddScriptAsync(res.Name)
|
||||
} else {
|
||||
h.AddScript(res.Name)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
cookie, err := r.Cookie("current_theme")
|
||||
if err == nil {
|
||||
inTheme, ok := Themes[html.EscapeString(cookie.Value)]
|
||||
if ok && !theme.HideFromThemes {
|
||||
theme = inTheme
|
||||
}
|
||||
}
|
||||
if theme.Name == "" {
|
||||
theme = Themes[DefaultThemeBox.Load().(string)]
|
||||
}
|
||||
|
||||
//h := w.Header()
|
||||
//h.Set("Content-Security-Policy", "default-src 'self'")
|
||||
header = &Header{
|
||||
Site: Site,
|
||||
Settings: SettingBox.Load().(SettingMap),
|
||||
Themes: Themes,
|
||||
Theme: theme,
|
||||
CurrentUser: *user,
|
||||
Zone: "panel",
|
||||
Writer: w,
|
||||
}
|
||||
// TODO: We should probably initialise header.ExtData
|
||||
|
||||
// TODO: GDPR. Add a global control panel notice warning the admins of staff members who don't have 2FA enabled
|
||||
stats.Users = Users.Count()
|
||||
stats.Groups = Groups.Count()
|
||||
stats.Forums = Forums.Count()
|
||||
stats.Pages = Pages.Count()
|
||||
stats.Settings = len(h.Settings)
|
||||
stats.WordFilters = WordFilters.EstCount()
|
||||
stats.Themes = len(Themes)
|
||||
stats.Reports = 0 // TODO: Do the report count. Only show open threads?
|
||||
header.AddSheet(theme.Name + "/panel.css")
|
||||
if len(theme.Resources) > 0 {
|
||||
rlist := theme.Resources
|
||||
for _, resource := range rlist {
|
||||
if resource.Location == "global" || resource.Location == "panel" {
|
||||
extarr := strings.Split(resource.Name, ".")
|
||||
ext := extarr[len(extarr)-1]
|
||||
if ext == "css" {
|
||||
header.AddSheet(resource.Name)
|
||||
} else if ext == "js" {
|
||||
header.AddScript(resource.Name)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
addPreScript := func(name string, i int) {
|
||||
// TODO: Optimise this by removing a superfluous string alloc
|
||||
if theme.OverridenMap != nil {
|
||||
//fmt.Printf("name %+v\n", name)
|
||||
//fmt.Printf("theme.OverridenMap %+v\n", theme.OverridenMap)
|
||||
if _, ok := theme.OverridenMap[name]; ok {
|
||||
tname := "_" + theme.Name
|
||||
//fmt.Printf("tname %+v\n", tname)
|
||||
h.AddPreScriptAsync("tmpl_" + name + tname + ".js")
|
||||
return
|
||||
}
|
||||
}
|
||||
//fmt.Printf("tname %+v\n", tname)
|
||||
h.AddPreScriptAsync(ucstrs[i])
|
||||
}
|
||||
addPreScript("alert", 3)
|
||||
addPreScript("notice", 4)
|
||||
//h := w.Header()
|
||||
//h.Set("Content-Security-Policy", "default-src 'self'")
|
||||
|
||||
return h, stats, nil
|
||||
// TODO: GDPR. Add a global control panel notice warning the admins of staff members who don't have 2FA enabled
|
||||
stats.Users = Users.GlobalCount()
|
||||
stats.Groups = Groups.GlobalCount()
|
||||
stats.Forums = Forums.GlobalCount()
|
||||
stats.Pages = Pages.GlobalCount()
|
||||
stats.Settings = len(header.Settings)
|
||||
stats.WordFilters = WordFilters.EstCount()
|
||||
stats.Themes = len(Themes)
|
||||
stats.Reports = 0 // TODO: Do the report count. Only show open threads?
|
||||
|
||||
// TODO: Remove this as it might be counter-productive
|
||||
/*pusher, ok := w.(http.Pusher)
|
||||
if ok {
|
||||
pusher.Push("/static/"+theme.Name+"/main.css", nil)
|
||||
pusher.Push("/static/"+theme.Name+"/panel.css", nil)
|
||||
pusher.Push("/static/global.js", nil)
|
||||
pusher.Push("/static/jquery-3.1.1.min.js", nil)
|
||||
// TODO: Test these
|
||||
for _, sheet := range header.Stylesheets {
|
||||
pusher.Push("/static/"+sheet, nil)
|
||||
}
|
||||
for _, script := range header.Scripts {
|
||||
pusher.Push("/static/"+script, nil)
|
||||
}
|
||||
// TODO: Push avatars?
|
||||
}*/
|
||||
|
||||
return header, stats, nil
|
||||
}
|
||||
|
||||
func simplePanelUserCheck(w http.ResponseWriter, r *http.Request, u *User) (lite *HeaderLite, rerr RouteError) {
|
||||
return SimpleUserCheck(w, r, u)
|
||||
func simplePanelUserCheck(w http.ResponseWriter, r *http.Request, user *User) (headerLite *HeaderLite, rerr RouteError) {
|
||||
return simpleUserCheck(w, r, user)
|
||||
}
|
||||
|
||||
// SimpleUserCheck is back from the grave, yay :D
|
||||
func simpleUserCheck(w http.ResponseWriter, r *http.Request, u *User) (lite *HeaderLite, rerr RouteError) {
|
||||
return &HeaderLite{
|
||||
Site: Site,
|
||||
Settings: SettingBox.Load().(SettingMap),
|
||||
Hooks: GetHookTable(),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func GetThemeByReq(r *http.Request) *Theme {
|
||||
theme := &Theme{Name: ""}
|
||||
cookie, e := r.Cookie("current_theme")
|
||||
if e == nil {
|
||||
inTheme, ok := Themes[html.EscapeString(cookie.Value)]
|
||||
if ok && !theme.HideFromThemes {
|
||||
theme = inTheme
|
||||
}
|
||||
}
|
||||
if theme.Name == "" {
|
||||
theme = Themes[DefaultThemeBox.Load().(string)]
|
||||
}
|
||||
return theme
|
||||
}
|
||||
|
||||
func userCheck(w http.ResponseWriter, r *http.Request, u *User) (h *Header, rerr RouteError) {
|
||||
return userCheck2(w, r, u, uutils.Nanotime())
|
||||
func simpleUserCheck(w http.ResponseWriter, r *http.Request, user *User) (headerLite *HeaderLite, rerr RouteError) {
|
||||
return &HeaderLite{
|
||||
Site: Site,
|
||||
Settings: SettingBox.Load().(SettingMap),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// TODO: Add the ability for admins to restrict certain themes to certain groups?
|
||||
// ! Be careful about firing errors off here as CustomError uses this
|
||||
func userCheck2(w http.ResponseWriter, r *http.Request, u *User, nano int64) (h *Header, rerr RouteError) {
|
||||
theme := GetThemeByReq(r)
|
||||
h = &Header{
|
||||
Site: Site,
|
||||
Settings: SettingBox.Load().(SettingMap),
|
||||
//Themes: Themes,
|
||||
ThemesSlice: ThemesSlice,
|
||||
Theme: theme,
|
||||
CurrentUser: u, // ! Some things rely on this being a pointer downstream from this function
|
||||
Hooks: GetHookTable(),
|
||||
Zone: ucstrs[0],
|
||||
Writer: w,
|
||||
IsoCode: phrases.GetLangPack().IsoCode,
|
||||
StartedAt: nano,
|
||||
}
|
||||
// TODO: Optimise this by avoiding accessing a map string index
|
||||
if !u.Loggedin {
|
||||
h.GoogSiteVerify = h.Settings["google_site_verify"].(string)
|
||||
}
|
||||
func userCheck(w http.ResponseWriter, r *http.Request, user *User) (header *Header, rerr RouteError) {
|
||||
var theme = &Theme{Name: ""}
|
||||
|
||||
if u.IsBanned {
|
||||
h.AddNotice("account_banned")
|
||||
}
|
||||
if u.Loggedin && !u.Active {
|
||||
h.AddNotice("account_inactive")
|
||||
}
|
||||
/*h.Scripts, _ = StrSlicePool.Get().([]string)
|
||||
if h.Scripts != nil {
|
||||
h.Scripts = h.Scripts[:0]
|
||||
}
|
||||
h.PreScriptsAsync, _ = StrSlicePool.Get().([]string)
|
||||
if h.PreScriptsAsync != nil {
|
||||
h.PreScriptsAsync = h.PreScriptsAsync[:0]
|
||||
}*/
|
||||
cookie, err := r.Cookie("current_theme")
|
||||
if err == nil {
|
||||
inTheme, ok := Themes[html.EscapeString(cookie.Value)]
|
||||
if ok && !theme.HideFromThemes {
|
||||
theme = inTheme
|
||||
}
|
||||
}
|
||||
if theme.Name == "" {
|
||||
theme = Themes[DefaultThemeBox.Load().(string)]
|
||||
}
|
||||
|
||||
// An optimisation so we don't populate StartedAt for users who shouldn't see the stat anyway
|
||||
// ? - Should we only show this in debug mode? It might be useful for detecting issues in production, if we show it there as-well
|
||||
//if u.IsAdmin {
|
||||
//h.StartedAt = time.Now()
|
||||
//}
|
||||
header = &Header{
|
||||
Site: Site,
|
||||
Settings: SettingBox.Load().(SettingMap),
|
||||
Themes: Themes,
|
||||
Theme: theme,
|
||||
CurrentUser: *user, // ! Some things rely on this being a pointer downstream from this function
|
||||
Zone: "frontend",
|
||||
Writer: w,
|
||||
}
|
||||
|
||||
//PrepResources(u,h,theme)
|
||||
return h, nil
|
||||
}
|
||||
if user.IsBanned {
|
||||
header.AddNotice("account_banned")
|
||||
}
|
||||
if user.Loggedin && !user.Active {
|
||||
header.AddNotice("account_inactive")
|
||||
}
|
||||
|
||||
func PrepResources(u *User, h *Header, theme *Theme) {
|
||||
h.AddSheet(theme.Name + "/main.css")
|
||||
if len(theme.Resources) > 0 {
|
||||
rlist := theme.Resources
|
||||
for _, resource := range rlist {
|
||||
if resource.Loggedin && !user.Loggedin {
|
||||
continue
|
||||
}
|
||||
if resource.Location == "global" || resource.Location == "frontend" {
|
||||
extarr := strings.Split(resource.Name, ".")
|
||||
ext := extarr[len(extarr)-1]
|
||||
if ext == "css" {
|
||||
header.AddSheet(resource.Name)
|
||||
} else if ext == "js" {
|
||||
header.AddScript(resource.Name)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(theme.Resources) > 0 {
|
||||
rlist := theme.Resources
|
||||
for _, res := range rlist {
|
||||
if res.Loggedin && !u.Loggedin {
|
||||
continue
|
||||
}
|
||||
if res.LocID == LocGlobal || res.LocID == LocFront {
|
||||
if res.Type == ResTypeSheet {
|
||||
h.AddSheet(res.Name)
|
||||
} else if res.Type == ResTypeScript {
|
||||
if res.Async {
|
||||
h.AddScriptAsync(res.Name)
|
||||
} else {
|
||||
h.AddScript(res.Name)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/*pusher, ok := w.(http.Pusher)
|
||||
if ok {
|
||||
pusher.Push("/static/"+theme.Name+"/main.css", nil)
|
||||
pusher.Push("/static/global.js", nil)
|
||||
pusher.Push("/static/jquery-3.1.1.min.js", nil)
|
||||
// TODO: Test these
|
||||
for _, sheet := range header.Stylesheets {
|
||||
pusher.Push("/static/"+sheet, nil)
|
||||
}
|
||||
for _, script := range header.Scripts {
|
||||
pusher.Push("/static/"+script, nil)
|
||||
}
|
||||
// TODO: Push avatars?
|
||||
}*/
|
||||
|
||||
addPreScript := func(name string, i int) {
|
||||
// TODO: Optimise this by removing a superfluous string alloc
|
||||
if theme.OverridenMap != nil {
|
||||
//fmt.Printf("name %+v\n", name)
|
||||
//fmt.Printf("theme.OverridenMap %+v\n", theme.OverridenMap)
|
||||
if _, ok := theme.OverridenMap[name]; ok {
|
||||
tname := "_" + theme.Name
|
||||
//fmt.Printf("tname %+v\n", tname)
|
||||
h.AddPreScriptAsync("tmpl_" + name + tname + ".js")
|
||||
return
|
||||
}
|
||||
}
|
||||
//fmt.Printf("tname %+v\n", tname)
|
||||
h.AddPreScriptAsync(ucstrs[i])
|
||||
}
|
||||
addPreScript("topics_topic", 1)
|
||||
addPreScript("paginator", 2)
|
||||
addPreScript("alert", 3)
|
||||
addPreScript("notice", 4)
|
||||
if u.Loggedin {
|
||||
addPreScript("topic_c_edit_post", 5)
|
||||
addPreScript("topic_c_attach_item", 6)
|
||||
addPreScript("topic_c_poll_input", 7)
|
||||
}
|
||||
}
|
||||
|
||||
func pstr(name string) string {
|
||||
return "tmpl_" + name + ".js"
|
||||
}
|
||||
|
||||
var ucstrs = [...]string{
|
||||
"frontend",
|
||||
|
||||
pstr("topics_topic"),
|
||||
pstr("paginator"),
|
||||
pstr("alert"),
|
||||
pstr("notice"),
|
||||
|
||||
pstr("topic_c_edit_post"),
|
||||
pstr("topic_c_attach_item"),
|
||||
pstr("topic_c_poll_input"),
|
||||
return header, nil
|
||||
}
|
||||
|
||||
func preRoute(w http.ResponseWriter, r *http.Request) (User, bool) {
|
||||
userptr, halt := Auth.SessionCheck(w, r)
|
||||
if halt {
|
||||
return *userptr, false
|
||||
}
|
||||
var usercpy *User = BlankUser()
|
||||
*usercpy = *userptr
|
||||
usercpy.Init() // TODO: Can we reduce the amount of work we do here?
|
||||
user, halt := Auth.SessionCheck(w, r)
|
||||
if halt {
|
||||
return *user, false
|
||||
}
|
||||
var usercpy *User = BlankUser()
|
||||
*usercpy = *user
|
||||
|
||||
// TODO: Add a config setting to disable this header
|
||||
// TODO: Have this header cover more things
|
||||
if Config.SslSchema {
|
||||
w.Header().Set("Content-Security-Policy", "upgrade-insecure-requests")
|
||||
}
|
||||
// TODO: WIP. Refactor this to eliminate the unnecessary query
|
||||
// TODO: Better take proxies into consideration
|
||||
host, _, err := net.SplitHostPort(r.RemoteAddr)
|
||||
if err != nil {
|
||||
PreError("Bad IP", w, r)
|
||||
return *usercpy, false
|
||||
}
|
||||
|
||||
// TODO: WIP. Refactor this to eliminate the unnecessary query
|
||||
// TODO: Better take proxies into consideration
|
||||
if !Config.DisableIP {
|
||||
var host string
|
||||
// TODO: Prefer Cf-Connecting-Ip header, fewer shenanigans
|
||||
if Site.HasProxy {
|
||||
// TODO: Check the right-most IP, might get tricky with multiple proxies, maybe have a setting for the number of hops we jump through
|
||||
xForwardedFor := r.Header.Get("X-Forwarded-For")
|
||||
if xForwardedFor != "" {
|
||||
forwardedFor := strings.Split(xForwardedFor, ",")
|
||||
// TODO: Check if this is a valid IP Address, reject if not
|
||||
host = forwardedFor[len(forwardedFor)-1]
|
||||
}
|
||||
}
|
||||
// TODO: Prefer Cf-Connecting-Ip header, fewer shenanigans
|
||||
if Site.HasProxy {
|
||||
// TODO: Check the right-most IP, might get tricky with multiple proxies, maybe have a setting for the number of hops we jump through
|
||||
xForwardedFor := r.Header.Get("X-Forwarded-For")
|
||||
if xForwardedFor != "" {
|
||||
forwardedFor := strings.Split(xForwardedFor, ",")
|
||||
// TODO: Check if this is a valid IP Address, reject if not
|
||||
host = forwardedFor[len(forwardedFor)-1]
|
||||
}
|
||||
}
|
||||
|
||||
if host == "" {
|
||||
var e error
|
||||
host, _, e = net.SplitHostPort(r.RemoteAddr)
|
||||
if e != nil {
|
||||
_ = PreError("Bad IP", w, r)
|
||||
return *usercpy, false
|
||||
}
|
||||
}
|
||||
// TODO: Add a config setting to disable this header
|
||||
// TODO: Have this header cover more things
|
||||
w.Header().Set("Content-Security-Policy", "upgrade-insecure-requests")
|
||||
|
||||
if !Config.DisableLastIP && usercpy.Loggedin && host != usercpy.GetIP() {
|
||||
mon := time.Now().Month()
|
||||
e := usercpy.UpdateIP(strconv.Itoa(int(mon)) + "-" + host)
|
||||
if e != nil {
|
||||
_ = InternalError(e, w, r)
|
||||
return *usercpy, false
|
||||
}
|
||||
}
|
||||
usercpy.LastIP = host
|
||||
}
|
||||
if user == &GuestUser {
|
||||
usercpy.LastIP = host
|
||||
return *usercpy, true
|
||||
}
|
||||
|
||||
return *usercpy, true
|
||||
}
|
||||
if host != usercpy.LastIP {
|
||||
err = usercpy.UpdateIP(host)
|
||||
if err != nil {
|
||||
InternalError(err, w, r)
|
||||
return *usercpy, false
|
||||
}
|
||||
usercpy.LastIP = host
|
||||
}
|
||||
|
||||
func UploadAvatar(w http.ResponseWriter, r *http.Request, u *User, tuid int) (ext string, ferr RouteError) {
|
||||
// We don't want multiple files
|
||||
// TODO: Are we doing this correctly?
|
||||
filenameMap := make(map[string]bool)
|
||||
for _, fheaders := range r.MultipartForm.File {
|
||||
for _, hdr := range fheaders {
|
||||
if hdr.Filename == "" {
|
||||
continue
|
||||
}
|
||||
filenameMap[hdr.Filename] = true
|
||||
}
|
||||
}
|
||||
if len(filenameMap) > 1 {
|
||||
return "", LocalError("You may only upload one avatar", w, r, u)
|
||||
}
|
||||
|
||||
for _, fheaders := range r.MultipartForm.File {
|
||||
for _, hdr := range fheaders {
|
||||
if hdr.Filename == "" {
|
||||
continue
|
||||
}
|
||||
inFile, err := hdr.Open()
|
||||
if err != nil {
|
||||
return "", LocalError("Upload failed", w, r, u)
|
||||
}
|
||||
defer inFile.Close()
|
||||
|
||||
if ext == "" {
|
||||
extarr := strings.Split(hdr.Filename, ".")
|
||||
if len(extarr) < 2 {
|
||||
return "", LocalError("Bad file", w, r, u)
|
||||
}
|
||||
ext = extarr[len(extarr)-1]
|
||||
|
||||
// TODO: Can we do this without a regex?
|
||||
reg, err := regexp.Compile("[^A-Za-z0-9]+")
|
||||
if err != nil {
|
||||
return "", LocalError("Bad file extension", w, r, u)
|
||||
}
|
||||
ext = reg.ReplaceAllString(ext, "")
|
||||
ext = strings.ToLower(ext)
|
||||
|
||||
if !ImageFileExts.Contains(ext) {
|
||||
return "", LocalError("You can only use an image for your avatar", w, r, u)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Centralise this string, so we don't have to change it in two different places when it changes
|
||||
outFile, err := os.Create("./uploads/avatar_" + strconv.Itoa(tuid) + "." + ext)
|
||||
if err != nil {
|
||||
return "", LocalError("Upload failed [File Creation Failed]", w, r, u)
|
||||
}
|
||||
defer outFile.Close()
|
||||
|
||||
_, err = io.Copy(outFile, inFile)
|
||||
if err != nil {
|
||||
return "", LocalError("Upload failed [Copy Failed]", w, r, u)
|
||||
}
|
||||
}
|
||||
}
|
||||
if ext == "" {
|
||||
return "", LocalError("No file", w, r, u)
|
||||
}
|
||||
return ext, nil
|
||||
}
|
||||
|
||||
func ChangeAvatar(path string, w http.ResponseWriter, r *http.Request, u *User) RouteError {
|
||||
e := u.ChangeAvatar(path)
|
||||
if e != nil {
|
||||
return InternalError(e, w, r)
|
||||
}
|
||||
|
||||
// Clean up the old avatar data, so we don't end up with too many dead files in /uploads/
|
||||
if len(u.RawAvatar) > 2 {
|
||||
if u.RawAvatar[0] == '.' && u.RawAvatar[1] == '.' {
|
||||
e := os.Remove("./uploads/avatar_" + strconv.Itoa(u.ID) + "_tmp" + u.RawAvatar[1:])
|
||||
if e != nil && !os.IsNotExist(e) {
|
||||
LogWarning(e)
|
||||
return LocalError("Something went wrong", w, r, u)
|
||||
}
|
||||
e = os.Remove("./uploads/avatar_" + strconv.Itoa(u.ID) + "_w48" + u.RawAvatar[1:])
|
||||
if e != nil && !os.IsNotExist(e) {
|
||||
LogWarning(e)
|
||||
return LocalError("Something went wrong", w, r, u)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
return *usercpy, true
|
||||
}
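The TODO in the proxy branch of preRoute asks for the forwarded address to be validated before it's trusted. A sketch of that check with net.ParseIP; trimming whitespace around the comma-separated entries is an assumption about how proxies format the header:

func rightmostForwardedIPSketch(xff string) (host string, ok bool) {
	if xff == "" {
		return "", false
	}
	parts := strings.Split(xff, ",")
	host = strings.TrimSpace(parts[len(parts)-1])
	if net.ParseIP(host) == nil {
		return "", false // not a valid IPv4/IPv6 literal, reject it
	}
	return host, true
}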
|
||||
|
||||
// SuperAdminOnly makes sure that only super admin can access certain critical panel routes
|
||||
func SuperAdminOnly(w http.ResponseWriter, r *http.Request, u *User) RouteError {
|
||||
if !u.IsSuperAdmin {
|
||||
return NoPermissions(w, r, u)
|
||||
}
|
||||
return nil
|
||||
func SuperAdminOnly(w http.ResponseWriter, r *http.Request, user User) RouteError {
|
||||
if !user.IsSuperAdmin {
|
||||
return NoPermissions(w, r, user)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// AdminOnly makes sure that only admins can access certain panel routes
|
||||
func AdminOnly(w http.ResponseWriter, r *http.Request, u *User) RouteError {
|
||||
if !u.IsAdmin {
|
||||
return NoPermissions(w, r, u)
|
||||
}
|
||||
return nil
|
||||
func AdminOnly(w http.ResponseWriter, r *http.Request, user User) RouteError {
|
||||
if !user.IsAdmin {
|
||||
return NoPermissions(w, r, user)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// SuperModOnly makes sure that only super mods or higher can access the panel routes
|
||||
func SuperModOnly(w http.ResponseWriter, r *http.Request, u *User) RouteError {
|
||||
if !u.IsSuperMod {
|
||||
return NoPermissions(w, r, u)
|
||||
}
|
||||
return nil
|
||||
func SuperModOnly(w http.ResponseWriter, r *http.Request, user User) RouteError {
|
||||
if !user.IsSuperMod {
|
||||
return NoPermissions(w, r, user)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// MemberOnly makes sure that only logged in users can access this route
|
||||
func MemberOnly(w http.ResponseWriter, r *http.Request, u *User) RouteError {
|
||||
if !u.Loggedin {
|
||||
return LoginRequired(w, r, u)
|
||||
}
|
||||
return nil
|
||||
func MemberOnly(w http.ResponseWriter, r *http.Request, user User) RouteError {
|
||||
if !user.Loggedin {
|
||||
return LoginRequired(w, r, user)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// NoBanned stops any banned users from accessing this route
|
||||
func NoBanned(w http.ResponseWriter, r *http.Request, u *User) RouteError {
|
||||
if u.IsBanned {
|
||||
return Banned(w, r, u)
|
||||
}
|
||||
return nil
|
||||
func NoBanned(w http.ResponseWriter, r *http.Request, user User) RouteError {
|
||||
if user.IsBanned {
|
||||
return Banned(w, r, user)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func ParseForm(w http.ResponseWriter, r *http.Request, u *User) RouteError {
|
||||
if e := r.ParseForm(); e != nil {
|
||||
return LocalError("Bad Form", w, r, u)
|
||||
}
|
||||
return nil
|
||||
func ParseForm(w http.ResponseWriter, r *http.Request, user User) RouteError {
|
||||
err := r.ParseForm()
|
||||
if err != nil {
|
||||
return LocalError("Bad Form", w, r, user)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func NoSessionMismatch(w http.ResponseWriter, r *http.Request, u *User) RouteError {
|
||||
if e := r.ParseForm(); e != nil {
|
||||
return LocalError("Bad Form", w, r, u)
|
||||
}
|
||||
if len(u.Session) == 0 {
|
||||
return SecurityError(w, r, u)
|
||||
}
|
||||
// TODO: Try to eliminate some of these allocations
|
||||
sess := []byte(u.Session)
|
||||
if subtle.ConstantTimeCompare([]byte(r.FormValue("session")), sess) != 1 && subtle.ConstantTimeCompare([]byte(r.FormValue("s")), sess) != 1 {
|
||||
return SecurityError(w, r, u)
|
||||
}
|
||||
return nil
|
||||
func NoSessionMismatch(w http.ResponseWriter, r *http.Request, user User) RouteError {
|
||||
err := r.ParseForm()
|
||||
if err != nil {
|
||||
return LocalError("Bad Form", w, r, user)
|
||||
}
|
||||
if r.FormValue("session") != user.Session {
|
||||
return SecurityError(w, r, user)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
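An aside on the two NoSessionMismatch variants above: the master side compares the submitted token against u.Session with crypto/subtle's ConstantTimeCompare rather than a plain string inequality. A minimal standalone sketch of that idea follows; it is not Gosora code, and tokensMatch is an illustrative name. For equal-length inputs the comparison does not bail out on the first mismatching byte, which removes one timing signal from the CSRF/session check.

package main

import (
	"crypto/subtle"
	"fmt"
)

// tokensMatch reports whether the submitted token matches the stored session
// token without short-circuiting on the first differing byte (for inputs of
// equal length). A mismatched length still returns false immediately.
func tokensMatch(submitted, stored string) bool {
	return subtle.ConstantTimeCompare([]byte(submitted), []byte(stored)) == 1
}

func main() {
	fmt.Println(tokensMatch("abc123", "abc123")) // true
	fmt.Println(tokensMatch("abc124", "abc123")) // false
}
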
func ReqIsJson(r *http.Request) bool {
	return r.Header.Get("Content-type") == "application/json"
	return r.Header.Get("Content-type") == "application/json"
}

func HandleUploadRoute(w http.ResponseWriter, r *http.Request, u *User, maxFileSize int) RouteError {
	// TODO: Reuse this code more
	if r.ContentLength > int64(maxFileSize) {
		size, unit := ConvertByteUnit(float64(maxFileSize))
		return CustomError("Your upload is too big. Your files need to be smaller than "+strconv.Itoa(int(size))+unit+".", http.StatusExpectationFailed, "Error", w, r, nil, u)
	}
	r.Body = http.MaxBytesReader(w, r.Body, r.ContentLength)
func HandleUploadRoute(w http.ResponseWriter, r *http.Request, user User, maxFileSize int) RouteError {
	// TODO: Reuse this code more
	if r.ContentLength > int64(maxFileSize) {
		size, unit := ConvertByteUnit(float64(maxFileSize))
		return CustomError("Your upload is too big. Your files need to be smaller than "+strconv.Itoa(int(size))+unit+".", http.StatusExpectationFailed, "Error", w, r, nil, user)
	}
	r.Body = http.MaxBytesReader(w, r.Body, int64(maxFileSize))

	e := r.ParseMultipartForm(int64(Megabyte))
	if e != nil {
		return LocalError("Bad Form", w, r, u)
	}
	return nil
	err := r.ParseMultipartForm(int64(Megabyte))
	if err != nil {
		return LocalError("Bad Form", w, r, user)
	}
	return nil
}

func NoUploadSessionMismatch(w http.ResponseWriter, r *http.Request, u *User) RouteError {
	if len(u.Session) == 0 {
		return SecurityError(w, r, u)
	}
	// TODO: Try to eliminate some of these allocations
	sess := []byte(u.Session)
	if subtle.ConstantTimeCompare([]byte(r.FormValue("session")), sess) != 1 && subtle.ConstantTimeCompare([]byte(r.FormValue("s")), sess) != 1 {
		return SecurityError(w, r, u)
	}
	return nil
func NoUploadSessionMismatch(w http.ResponseWriter, r *http.Request, user User) RouteError {
	if r.FormValue("session") != user.Session {
		return SecurityError(w, r, user)
	}
	return nil
}

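All of the pre-route gates above share one shape: they take the writer, the request and the user, and return nil when the request may proceed. The actual wiring lives in the generated router elsewhere in the repository; the following is only a rough, self-contained sketch of how such gates compose, with gate, runGates and the stub User and RouteError types invented for the example.

package main

import (
	"errors"
	"fmt"
	"net/http"
)

// Minimal stand-ins for the real common.User and RouteError types, defined
// here only so the sketch compiles on its own.
type User struct {
	Loggedin bool
}
type RouteError error

// gate mirrors the shape of the checks above: nil means "carry on".
type gate func(w http.ResponseWriter, r *http.Request, u *User) RouteError

// runGates runs each gate in order and only calls the handler if all pass.
func runGates(w http.ResponseWriter, r *http.Request, u *User, handler gate, gates ...gate) RouteError {
	for _, g := range gates {
		if err := g(w, r, u); err != nil {
			return err
		}
	}
	return handler(w, r, u)
}

func main() {
	memberOnly := func(w http.ResponseWriter, r *http.Request, u *User) RouteError {
		if !u.Loggedin {
			return errors.New("login required")
		}
		return nil
	}
	handler := func(w http.ResponseWriter, r *http.Request, u *User) RouteError {
		fmt.Println("handler ran")
		return nil
	}
	fmt.Println(runGates(nil, nil, &User{Loggedin: true}, handler, memberOnly)) // handler ran, then <nil>
}
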
155
common/search.go
@@ -1,155 +0,0 @@
package common

import (
	"database/sql"
	"errors"
	"strconv"

	qgen "git.tuxpa.in/a/gosora/query_gen"
)

var RepliesSearch Searcher

type Searcher interface {
	Query(q string, zones []int) ([]int, error)
}

// TODO: Implement this
// Note: This is slow compared to something like ElasticSearch and very limited
type SQLSearcher struct {
	queryReplies     *sql.Stmt
	queryTopics      *sql.Stmt
	queryRepliesZone *sql.Stmt
	queryTopicsZone  *sql.Stmt
	//queryZone *sql.Stmt
	fuzzyZone *sql.Stmt
}

// TODO: Support things other than MySQL
// TODO: Use LIMIT?
func NewSQLSearcher(acc *qgen.Accumulator) (*SQLSearcher, error) {
	if acc.GetAdapter().GetName() != "mysql" {
		return nil, errors.New("SQLSearcher only supports MySQL at this time")
	}
	return &SQLSearcher{
		queryReplies:     acc.RawPrepare("SELECT tid FROM replies WHERE MATCH(content) AGAINST (? IN BOOLEAN MODE)"),
		queryTopics:      acc.RawPrepare("SELECT tid FROM topics WHERE MATCH(title) AGAINST (? IN BOOLEAN MODE) OR MATCH(content) AGAINST (? IN BOOLEAN MODE)"),
		queryRepliesZone: acc.RawPrepare("SELECT tid FROM replies WHERE MATCH(content) AGAINST (? IN BOOLEAN MODE) AND tid=?"),
		queryTopicsZone:  acc.RawPrepare("SELECT tid FROM topics WHERE (MATCH(title) AGAINST (? IN BOOLEAN MODE) OR MATCH(content) AGAINST (? IN BOOLEAN MODE)) AND parentID=?"),
		//queryZone: acc.RawPrepare("SELECT topics.tid FROM topics INNER JOIN replies ON topics.tid = replies.tid WHERE (topics.title=? OR (MATCH(topics.title) AGAINST (? IN BOOLEAN MODE) OR MATCH(topics.content) AGAINST (? IN BOOLEAN MODE) OR MATCH(replies.content) AGAINST (? IN BOOLEAN MODE)) OR topics.content=? OR replies.content=?) AND topics.parentID=?"),
		fuzzyZone: acc.RawPrepare("SELECT topics.tid FROM topics INNER JOIN replies ON topics.tid = replies.tid WHERE (topics.title LIKE ? OR topics.content LIKE ? OR replies.content LIKE ?) AND topics.parentID=?"),
	}, acc.FirstError()
}

func (s *SQLSearcher) queryAll(q string) ([]int, error) {
	var ids []int
	run := func(stmt *sql.Stmt, q ...interface{}) error {
		rows, e := stmt.Query(q...)
		if e == sql.ErrNoRows {
			return nil
		} else if e != nil {
			return e
		}
		defer rows.Close()

		for rows.Next() {
			var id int
			if e := rows.Scan(&id); e != nil {
				return e
			}
			ids = append(ids, id)
		}
		return rows.Err()
	}

	err := run(s.queryReplies, q)
	if err != nil {
		return nil, err
	}
	err = run(s.queryTopics, q, q)
	if err != nil {
		return nil, err
	}
	if len(ids) == 0 {
		err = sql.ErrNoRows
	}
	return ids, err
}

func (s *SQLSearcher) Query(q string, zones []int) (ids []int, err error) {
	if len(zones) == 0 {
		return nil, nil
	}
	run := func(rows *sql.Rows, e error) error {
		/*if e == sql.ErrNoRows {
			return nil
		} else */if e != nil {
			return e
		}
		defer rows.Close()

		for rows.Next() {
			var id int
			if e := rows.Scan(&id); e != nil {
				return e
			}
			ids = append(ids, id)
		}
		return rows.Err()
	}

	if len(zones) == 1 {
		//err = run(s.queryZone.Query(q, q, q, q, q, q, zones[0]))
		err = run(s.queryRepliesZone.Query(q, zones[0]))
		if err != nil {
			return nil, err
		}
		err = run(s.queryTopicsZone.Query(q, q, zones[0]))
	} else {
		var zList string
		for _, zone := range zones {
			zList += strconv.Itoa(zone) + ","
		}
		zList = zList[:len(zList)-1]

		acc := qgen.NewAcc()
		/*stmt := acc.RawPrepare("SELECT topics.tid FROM topics INNER JOIN replies ON topics.tid = replies.tid WHERE (MATCH(topics.title) AGAINST (? IN BOOLEAN MODE) OR MATCH(topics.content) AGAINST (? IN BOOLEAN MODE) OR MATCH(replies.content) AGAINST (? IN BOOLEAN MODE) OR topics.title=? OR topics.content=? OR replies.content=?) AND topics.parentID IN(" + zList + ")")
		if err = acc.FirstError(); err != nil {
			return nil, err
		}*/
		// TODO: Cache common IN counts
		stmt := acc.RawPrepare("SELECT tid FROM topics WHERE (MATCH(topics.title) AGAINST (? IN BOOLEAN MODE) OR MATCH(topics.content) AGAINST (? IN BOOLEAN MODE)) AND parentID IN(" + zList + ")")
		if err = acc.FirstError(); err != nil {
			return nil, err
		}
		err = run(stmt.Query(q, q))
		if err != nil {
			return nil, err
		}
		stmt = acc.RawPrepare("SELECT tid FROM replies WHERE MATCH(replies.content) AGAINST (? IN BOOLEAN MODE) AND tid IN(" + zList + ")")
		if err = acc.FirstError(); err != nil {
			return nil, err
		}
		err = run(stmt.Query(q))
		//err = run(stmt.Query(q, q, q, q, q, q))
	}
	if err != nil {
		return nil, err
	}
	if len(ids) == 0 {
		err = sql.ErrNoRows
	}
	return ids, err
}

// TODO: Implement this
type ElasticSearchSearcher struct {
}

func NewElasticSearchSearcher() (*ElasticSearchSearcher, error) {
	return &ElasticSearchSearcher{}, nil
}

func (s *ElasticSearchSearcher) Query(q string, zones []int) ([]int, error) {
	return nil, nil
}
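For context on the Searcher interface in the deleted file above: Query takes the search string plus a list of zone (forum) IDs and returns matching topic IDs, with sql.ErrNoRows doubling as the "no matches" signal in SQLSearcher. Below is a self-contained toy implementation, written only to illustrate that contract; memorySearcher is invented for this sketch and is not part of Gosora.

package main

import (
	"fmt"
	"strings"
)

// Searcher mirrors the interface above: a query string plus zone IDs in,
// matching topic IDs out.
type Searcher interface {
	Query(q string, zones []int) ([]int, error)
}

// memorySearcher is a toy stand-in for SQLSearcher, here only so the example
// runs without a database.
type memorySearcher struct {
	titles map[int]string // tid -> topic title
	forums map[int]int    // tid -> parent forum (zone) ID
}

func (s *memorySearcher) Query(q string, zones []int) (ids []int, err error) {
	inZone := map[int]bool{}
	for _, z := range zones {
		inZone[z] = true
	}
	for tid, title := range s.titles {
		if inZone[s.forums[tid]] && strings.Contains(strings.ToLower(title), strings.ToLower(q)) {
			ids = append(ids, tid)
		}
	}
	return ids, nil
}

func main() {
	var s Searcher = &memorySearcher{
		titles: map[int]string{1: "Welcome to Gosora", 2: "Bug reports"},
		forums: map[int]int{1: 2, 2: 3},
	}
	ids, err := s.Query("welcome", []int{2})
	fmt.Println(ids, err) // [1] <nil>
}
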
common/settings.go
@@ -7,7 +7,7 @@ import (
	"strings"
	"sync/atomic"

	qgen "git.tuxpa.in/a/gosora/query_gen"
	"../query_gen/lib"
)

var SettingBox atomic.Value // An atomic value pointing to a SettingBox
@@ -16,167 +16,173 @@ var SettingBox atomic.Value // An atomic value pointing to a SettingBox

type SettingMap map[string]interface{}

type SettingStore interface {
	ParseSetting(name, content, typ, constraint string) string
	BypassGet(name string) (*Setting, error)
	BypassGetAll(name string) ([]*Setting, error)
	ParseSetting(sname string, scontent string, stype string, sconstraint string) string
	BypassGet(name string) (*Setting, error)
	BypassGetAll(name string) ([]*Setting, error)
}

type OptionLabel struct {
	Label    string
	Value    int
	Selected bool
	Label    string
	Value    int
	Selected bool
}

type Setting struct {
	Name       string
	Content    string
	Type       string
	Constraint string
	Name       string
	Content    string
	Type       string
	Constraint string
}

type SettingStmts struct {
	getAll *sql.Stmt
	get    *sql.Stmt
	update *sql.Stmt
	getAll *sql.Stmt
	get    *sql.Stmt
	update *sql.Stmt
}

var settingStmts SettingStmts

func init() {
	SettingBox.Store(SettingMap(make(map[string]interface{})))
	DbInits.Add(func(acc *qgen.Accumulator) error {
		s := "settings"
		settingStmts = SettingStmts{
			getAll: acc.Select(s).Columns("name,content,type,constraints").Prepare(),
			get:    acc.Select(s).Columns("content,type,constraints").Where("name=?").Prepare(),
			update: acc.Update(s).Set("content=?").Where("name=?").Prepare(),
		}
		return acc.FirstError()
	})
	SettingBox.Store(SettingMap(make(map[string]interface{})))
	DbInits.Add(func(acc *qgen.Accumulator) error {
		settingStmts = SettingStmts{
			getAll: acc.Select("settings").Columns("name, content, type, constraints").Prepare(),
			get:    acc.Select("settings").Columns("content, type, constraints").Where("name = ?").Prepare(),
			update: acc.Update("settings").Set("content = ?").Where("name = ?").Prepare(),
		}
		return acc.FirstError()
	})
}

func (s *Setting) Copy() (o *Setting) {
	o = &Setting{Name: ""}
	*o = *s
	return o
func (setting *Setting) Copy() (out *Setting) {
	out = &Setting{Name: ""}
	*out = *setting
	return out
}

func LoadSettings() error {
	sBox := SettingMap(make(map[string]interface{}))
	settings, err := sBox.BypassGetAll()
	if err != nil {
		return err
	}
	var sBox = SettingMap(make(map[string]interface{}))
	settings, err := sBox.BypassGetAll()
	if err != nil {
		return err
	}

	for _, s := range settings {
		err = sBox.ParseSetting(s.Name, s.Content, s.Type, s.Constraint)
		if err != nil {
			return err
		}
	}
	for _, setting := range settings {
		err = sBox.ParseSetting(setting.Name, setting.Content, setting.Type, setting.Constraint)
		if err != nil {
			return err
		}
	}

	SettingBox.Store(sBox)
	return nil
	SettingBox.Store(sBox)
	return nil
}

// nolint
var ErrNotInteger = errors.New("You were supposed to enter an integer x.x")
var ErrSettingNotInteger = errors.New("Only integers are allowed in this setting x.x")
var ErrBadConstraintNotInteger = errors.New("Invalid constraint! The constraint field wasn't an integer!")
var ErrBadSettingRange = errors.New("Only integers between a certain range are allowed in this setting")

// To avoid leaking internal state to the user
// TODO: We need to add some sort of DualError interface
func SafeSettingError(err error) bool {
	return err == ErrNotInteger || err == ErrSettingNotInteger || err == ErrBadConstraintNotInteger || err == ErrBadSettingRange || err == ErrNoRows
}

// TODO: Add better support for HTML attributes (html-attribute). E.g. Meta descriptions.
func (sBox SettingMap) ParseSetting(name, content, typ, constraint string) (err error) {
	ssBox := map[string]interface{}(sBox)
	switch typ {
	case "bool":
		ssBox[name] = (content == "1")
	case "int":
		ssBox[name], err = strconv.Atoi(content)
		if err != nil {
			return errors.New("You were supposed to enter an integer x.x")
		}
	case "int64":
		ssBox[name], err = strconv.ParseInt(content, 10, 64)
		if err != nil {
			return errors.New("You were supposed to enter an integer x.x")
		}
	case "list":
		cons := strings.Split(constraint, "-")
		if len(cons) < 2 {
			return errors.New("Invalid constraint! The second field wasn't set!")
		}
func (sBox SettingMap) ParseSetting(sname string, scontent string, stype string, constraint string) (err error) {
	var ssBox = map[string]interface{}(sBox)
	switch stype {
	case "bool":
		ssBox[sname] = (scontent == "1")
	case "int":
		ssBox[sname], err = strconv.Atoi(scontent)
		if err != nil {
			return ErrNotInteger
		}
	case "int64":
		ssBox[sname], err = strconv.ParseInt(scontent, 10, 64)
		if err != nil {
			return ErrNotInteger
		}
	case "list":
		cons := strings.Split(constraint, "-")
		if len(cons) < 2 {
			return errors.New("Invalid constraint! The second field wasn't set!")
		}

		con1, err := strconv.Atoi(cons[0])
		con2, err2 := strconv.Atoi(cons[1])
		if err != nil || err2 != nil {
			return errors.New("Invalid constraint! The constraint field wasn't an integer!")
		}
		con1, err := strconv.Atoi(cons[0])
		con2, err2 := strconv.Atoi(cons[1])
		if err != nil || err2 != nil {
			return ErrBadConstraintNotInteger
		}

		val, err := strconv.Atoi(content)
		if err != nil {
			return errors.New("Only integers are allowed in this setting x.x")
		}
		value, err := strconv.Atoi(scontent)
		if err != nil {
			return ErrSettingNotInteger
		}

		if val < con1 || val > con2 {
			return errors.New("Only integers between a certain range are allowed in this setting")
		}
		ssBox[name] = val
	default:
		ssBox[name] = content
	}
	return nil
		if value < con1 || value > con2 {
			return ErrBadSettingRange
		}
		ssBox[sname] = value
	default:
		ssBox[sname] = scontent
	}
	return nil
}
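A worked example of the "list" branch of ParseSetting above, with illustrative values rather than anything taken from this diff: a constraint of "1-10" splits into the bounds 1 and 10, content "5" is stored as the integer 5, and content "11" is rejected with the out-of-range error. The helper below mirrors that validation in isolation; inRange is a made-up name.

package main

import (
	"errors"
	"fmt"
	"strconv"
	"strings"
)

// inRange reproduces the constraint check of the "list" setting type:
// the constraint "low-high" bounds the integer content.
func inRange(content, constraint string) (int, error) {
	cons := strings.Split(constraint, "-")
	if len(cons) < 2 {
		return 0, errors.New("the second constraint field wasn't set")
	}
	con1, err1 := strconv.Atoi(cons[0])
	con2, err2 := strconv.Atoi(cons[1])
	if err1 != nil || err2 != nil {
		return 0, errors.New("the constraint bounds must be integers")
	}
	val, err := strconv.Atoi(content)
	if err != nil {
		return 0, errors.New("only integers are allowed in this setting")
	}
	if val < con1 || val > con2 {
		return 0, fmt.Errorf("%d is outside the range %d-%d", val, con1, con2)
	}
	return val, nil
}

func main() {
	fmt.Println(inRange("5", "1-10"))  // 5 <nil>
	fmt.Println(inRange("11", "1-10")) // 0 and an out-of-range error
}
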

func (sBox SettingMap) BypassGet(name string) (*Setting, error) {
	s := &Setting{Name: name}
	err := settingStmts.get.QueryRow(name).Scan(&s.Content, &s.Type, &s.Constraint)
	return s, err
	setting := &Setting{Name: name}
	err := settingStmts.get.QueryRow(name).Scan(&setting.Content, &setting.Type, &setting.Constraint)
	return setting, err
}

func (sBox SettingMap) BypassGetAll() (settingList []*Setting, err error) {
	rows, err := settingStmts.getAll.Query()
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	rows, err := settingStmts.getAll.Query()
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	for rows.Next() {
		s := &Setting{Name: ""}
		err := rows.Scan(&s.Name, &s.Content, &s.Type, &s.Constraint)
		if err != nil {
			return nil, err
		}
		settingList = append(settingList, s)
	}
	return settingList, rows.Err()
	for rows.Next() {
		setting := &Setting{Name: ""}
		err := rows.Scan(&setting.Name, &setting.Content, &setting.Type, &setting.Constraint)
		if err != nil {
			return nil, err
		}
		settingList = append(settingList, setting)
	}
	return settingList, rows.Err()
}

func (sBox SettingMap) Update(name, content string) RouteError {
	s, err := sBox.BypassGet(name)
	if err == ErrNoRows {
		return FromError(err)
	} else if err != nil {
		return SysError(err.Error())
	}
func (sBox SettingMap) Update(name string, content string) error {
	setting, err := sBox.BypassGet(name)
	if err == ErrNoRows {
		return err
	}

	// TODO: Why is this here and not in a common function?
	if s.Type == "bool" {
		if content == "on" || content == "1" {
			content = "1"
		} else {
			content = "0"
		}
	}
	// TODO: Why is this here and not in a common function?
	if setting.Type == "bool" {
		if content == "on" || content == "1" {
			content = "1"
		} else {
			content = "0"
		}
	}

	err = sBox.ParseSetting(name, content, s.Type, s.Constraint)
	if err != nil {
		return FromError(err)
	}
	// TODO: Make this a method or function?
	_, err = settingStmts.update.Exec(content, name)
	if err != nil {
		return err
	}

	// TODO: Make this a method or function?
	_, err = settingStmts.update.Exec(content, name)
	if err != nil {
		return SysError(err.Error())
	}

	err = LoadSettings()
	if err != nil {
		return SysError(err.Error())
	}
	return nil
	err = sBox.ParseSetting(name, content, setting.Type, setting.Constraint)
	if err != nil {
		return err
	}
	// TODO: Do a reload instead?
	SettingBox.Store(sBox)
	return nil
}

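The settings parsed above end up in SettingBox, an atomic.Value holding a SettingMap, so readers take a lock-free snapshot and type-assert individual entries to whatever ParseSetting stored (bool, int, int64 or string). A minimal sketch of that consumer side follows, with made-up setting names and values; the real keys live in the settings table.

package main

import (
	"fmt"
	"sync/atomic"
)

type SettingMap map[string]interface{}

var SettingBox atomic.Value

func main() {
	// LoadSettings would normally populate this from the database.
	SettingBox.Store(SettingMap{"meta_desc": "A forum", "activation_level": 1})

	// Readers grab an immutable snapshot and assert the entry's type.
	sBox := SettingBox.Load().(SettingMap)
	if level, ok := sBox["activation_level"].(int); ok {
		fmt.Println("activation_level:", level)
	}
}
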
480
common/site.go
@@ -1,14 +1,12 @@
package common

import (
	"encoding/json"
	"io/ioutil"
	"log"
	"net/url"
	"strconv"
	"strings"

	"github.com/pkg/errors"
	"encoding/json"
	"errors"
	"io/ioutil"
	"log"
	"strconv"
	"strings"
)

// Site holds the basic settings which should be tweaked when setting up a site, we might move them to the settings table at some point
@@ -23,362 +21,194 @@ var Config = new(config)
// Dev holds build flags and other things which should only be modified during development or to gather additional test data
var Dev = new(devConfig)

var PluginConfig = map[string]string{}

type site struct {
	ShortName string
	Name string
	Email string
	URL string
	Host string
	LocalHost bool // Used internally, do not modify as it will be overwritten
	Port string
	PortInt int // Alias for efficiency, do not modify, will be overwritten
	EnableSsl bool
	EnableEmails bool
	HasProxy bool
	Language string
	ShortName string
	Name string
	Email string
	URL string
	Host string
	Port string
	EnableSsl bool
	EnableEmails bool
	HasProxy bool
	Language string

	MaxRequestSize int // Alias, do not modify, will be overwritten
	MaxRequestSize int // Alias, do not modify, will be overwritten
}

type dbConfig struct {
	// Production database
	Adapter string
	Host string
	Username string
	Password string
	Dbname string
	Port string
	// Production database
	Adapter string
	Host string
	Username string
	Password string
	Dbname string
	Port string

	// Test database. Split this into a separate variable?
	TestAdapter string
	TestHost string
	TestUsername string
	TestPassword string
	TestDbname string
	TestPort string
	// Test database. Split this into a separate variable?
	TestAdapter string
	TestHost string
	TestUsername string
	TestPassword string
	TestDbname string
	TestPort string
}

type config struct {
	SslPrivkey string
	SslFullchain string
	HashAlgo string // Defaults to bcrypt, and in the future, possibly something stronger
	ConvoKey string
	SslPrivkey string
	SslFullchain string
	HashAlgo string // Defaults to bcrypt, and in the future, possibly something stronger

	MaxRequestSizeStr string
	MaxRequestSize int
	UserCache string
	UserCacheCapacity int
	TopicCache string
	TopicCacheCapacity int
	ReplyCache string
	ReplyCacheCapacity int
	MaxRequestSizeStr string
	MaxRequestSize int
	UserCache string
	UserCacheCapacity int
	TopicCache string
	TopicCacheCapacity int

	SMTPServer string
	SMTPUsername string
	SMTPPassword string
	SMTPPort string
	SMTPEnableTLS bool
	SMTPServer string
	SMTPUsername string
	SMTPPassword string
	SMTPPort string
	//SMTPEnableTLS bool

	Search string
	DefaultPath string
	DefaultGroup int // Should be a setting in the database
	ActivationGroup int // Should be a setting in the database
	StaffCSS string // ? - Move this into the settings table? Might be better to implement this as Group CSS
	DefaultForum int // The forum posts go in by default, this used to be covered by the Uncategorised Forum, but we want to replace it with a more robust solution. Make this a setting?
	MinifyTemplates bool
	BuildSlugs bool // TODO: Make this a setting?
	ServerCount int

	DefaultPath string
	DefaultGroup int // Should be a setting in the database
	ActivationGroup int // Should be a setting in the database
	StaffCSS string // ? - Move this into the settings table? Might be better to implement this as Group CSS
	DefaultForum int // The forum posts go in by default, this used to be covered by the Uncategorised Forum, but we want to replace it with a more robust solution. Make this a setting?
	MinifyTemplates bool
	BuildSlugs bool // TODO: Make this a setting?
	DisableLiveTopicList bool

	PrimaryServer bool
	ServerCount int
	LastIPCutoff int // Currently just -1, non--1, but will accept the number of months a user's last IP should be retained for in the future before being purged. Please note that the other two cutoffs below operate off the numbers of days instead.
	PostIPCutoff int
	PollIPCutoff int
	LogPruneCutoff int
	//SelfDeleteTruncCutoff int // Personal data is stripped from the mod action rows only leaving the TID and the action for later investigation.

	DisableIP bool
	DisableLastIP bool
	DisablePostIP bool
	DisablePollIP bool
	DisableRegLog bool
	DisableLoginLog bool
	//DisableSelfDeleteLog bool

	DisableLiveTopicList bool
	DisableJSAntispam bool
	//LooseCSP bool
	LooseHost bool
	LoosePort bool
	SslSchema bool // Pretend we're using SSL, might be useful if a reverse-proxy terminates SSL in-front of Gosora
	DisableServerPush bool
	EnableCDNPush bool
	DisableNoavatarRange bool
	DisableDefaultNoavatar bool
	DisableAnalytics bool

	RefNoTrack bool
	RefNoRef bool
	NoEmbed bool

	ExtraCSPOrigins string
	StaticResBase string // /s/
	//DynStaticResBase string
	AvatarResBase string // /uploads/

	Noavatar string // ? - Move this into the settings table?
	ItemsPerPage int // ? - Move this into the settings table?
	MaxTopicTitleLength int
	MaxUsernameLength int

	ReadTimeout int
	WriteTimeout int
	IdleTimeout int

	LogDir string
	DisableSuspLog bool
	DisableBadRouteLog bool
	DisableStdout bool
	DisableStderr bool
	Noavatar string // ? - Move this into the settings table?
	ItemsPerPage int // ? - Move this into the settings table?
	MaxTopicTitleLength int
	MaxUsernameLength int
}

type devConfig struct {
	DebugMode bool
	SuperDebug bool
	TemplateDebug bool
	Profiling bool
	TestDB bool

	NoFsnotify bool // Super Experimental!
	FullReqLog bool
	ExtraTmpls string // Experimental flag for adding compiled templates, we'll likely replace this with a better mechanism

	//QuicPort int // Experimental!

	//ExpFix1 bool // unlisted setting, experimental fix for http/1.1 conn hangs
	LogLongTick bool // unlisted setting
	LogNewLongRoute bool // unlisted setting
	Log4thLongRoute bool // unlisted setting

	HourDBTimeout bool // unlisted setting
	DebugMode bool
	SuperDebug bool
	TemplateDebug bool
	Profiling bool
	TestDB bool
}

// configHolder is purely for having a big struct to unmarshal data into
type configHolder struct {
	Site *site
	Config *config
	Database *dbConfig
	Dev *devConfig
	Plugin map[string]string
	Site *site
	Config *config
	Database *dbConfig
	Dev *devConfig
}

func LoadConfig() error {
	data, err := ioutil.ReadFile("./config/config.json")
	if err != nil {
		return err
	}
	data, err := ioutil.ReadFile("./config/config.json")
	if err != nil {
		return err
	}

	var config configHolder
	err = json.Unmarshal(data, &config)
	if err != nil {
		return err
	}
	var config configHolder
	err = json.Unmarshal(data, &config)
	if err != nil {
		return err
	}

	Site = config.Site
	Config = config.Config
	DbConfig = config.Database
	Dev = config.Dev
	PluginConfig = config.Plugin
	Site = config.Site
	Config = config.Config
	DbConfig = config.Database
	Dev = config.Dev

	return nil
	return nil
}

var noavatarCache200 []string
var noavatarCache48 []string

/*var noavatarCache200Jpg []string
var noavatarCache48Jpg []string
var noavatarCache200Avif []string
var noavatarCache48Avif []string*/

func ProcessConfig() (err error) {
	// Strip these unnecessary bits, if we find them.
	Site.URL = strings.TrimPrefix(Site.URL, "http://")
	Site.URL = strings.TrimPrefix(Site.URL, "https://")
	Site.Host = Site.URL
	Site.LocalHost = Site.Host == "localhost" || Site.Host == "127.0.0.1" || Site.Host == "::1"
	Site.PortInt, err = strconv.Atoi(Site.Port)
	if err != nil {
		return errors.New("The port must be a valid integer")
	}
	if Site.PortInt != 80 && Site.PortInt != 443 {
		Site.URL = strings.TrimSuffix(Site.URL, "/")
		Site.URL = strings.TrimSuffix(Site.URL, "\\")
		Site.URL = strings.TrimSuffix(Site.URL, ":")
		Site.URL = Site.URL + ":" + Site.Port
	}
	uurl, err := url.Parse(Site.URL)
	if err != nil {
		return errors.Wrap(err, "failed to parse Site.URL: ")
	}
	if Site.EnableSsl {
		Config.SslSchema = Site.EnableSsl
	}
	if Config.DefaultPath == "" {
		Config.DefaultPath = "/topics/"
	}
	Config.Noavatar = strings.Replace(Config.Noavatar, "{site_url}", Site.URL, -1)
	Site.Host = Site.URL
	if Site.Port != "80" && Site.Port != "443" {
		Site.URL = strings.TrimSuffix(Site.URL, "/")
		Site.URL = strings.TrimSuffix(Site.URL, "\\")
		Site.URL = strings.TrimSuffix(Site.URL, ":")
		Site.URL = Site.URL + ":" + Site.Port
	}
	if Config.DefaultPath == "" {
		Config.DefaultPath = "/topics/"
	}

	// TODO: Bump the size of max request size up, if it's too low
	Config.MaxRequestSize, err = strconv.Atoi(Config.MaxRequestSizeStr)
	if err != nil {
		reqSizeStr := Config.MaxRequestSizeStr
		if len(reqSizeStr) < 3 {
			return errors.New("Invalid unit for MaxRequestSizeStr")
		}
	// TODO: Bump the size of max request size up, if it's too low
	Config.MaxRequestSize, err = strconv.Atoi(Config.MaxRequestSizeStr)
	if err != nil {
		reqSizeStr := Config.MaxRequestSizeStr
		if len(reqSizeStr) < 3 {
			return errors.New("Invalid unit for MaxRequestSizeStr")
		}

		quantity, err := strconv.Atoi(reqSizeStr[:len(reqSizeStr)-2])
		if err != nil {
			return errors.New("Unable to convert quantity to integer in MaxRequestSizeStr, found " + reqSizeStr[:len(reqSizeStr)-2])
		}
		unit := reqSizeStr[len(reqSizeStr)-2:]
		quantity, err := strconv.Atoi(reqSizeStr[:len(reqSizeStr)-2])
		if err != nil {
			return errors.New("Unable to convert quantity to integer in MaxRequestSizeStr, found " + reqSizeStr[:len(reqSizeStr)-2])
		}
		unit := reqSizeStr[len(reqSizeStr)-2:]

		// TODO: Make it a named error just in case new errors are added in here in the future
		Config.MaxRequestSize, err = FriendlyUnitToBytes(quantity, unit)
		if err != nil {
			return errors.New("Unable to recognise unit for MaxRequestSizeStr, found " + unit)
		}
	}
	if Dev.DebugMode {
		log.Print("Set MaxRequestSize to ", Config.MaxRequestSize)
	}
	if Config.MaxRequestSize <= 0 {
		log.Fatal("MaxRequestSize should not be zero or below")
	}
	Site.MaxRequestSize = Config.MaxRequestSize
		// TODO: Make it a named error just in case new errors are added in here in the future
		Config.MaxRequestSize, err = FriendlyUnitToBytes(quantity, unit)
		if err != nil {
			return errors.New("Unable to recognise unit for MaxRequestSizeStr, found " + unit)
		}
	}
	if Dev.DebugMode {
		log.Print("Set MaxRequestSize to ", Config.MaxRequestSize)
	}
	if Config.MaxRequestSize <= 0 {
		log.Fatal("MaxRequestSize should not be zero or below")
	}
	Site.MaxRequestSize = Config.MaxRequestSize

	local := func(h string) bool {
		return h == "localhost" || h == "127.0.0.1" || h == "::1" || h == Site.URL
	}
	uurl, err = url.Parse(Config.StaticResBase)
	if err != nil {
		return errors.Wrap(err, "failed to parse Config.StaticResBase: ")
	}
	host := uurl.Hostname()
	if !local(host) {
		Config.ExtraCSPOrigins += " " + host
		Config.RefNoRef = true // Avoid leaking origin data to the CDN
	}
	if Config.StaticResBase != "" {
		StaticFiles.Prefix = Config.StaticResBase
	}
	// ? Find a way of making these unlimited if zero? It might rule out some optimisations, waste memory, and break layouts
	if Config.MaxTopicTitleLength == 0 {
		Config.MaxTopicTitleLength = 100
	}
	if Config.MaxUsernameLength == 0 {
		Config.MaxUsernameLength = 100
	}
	GuestUser.Avatar, GuestUser.MicroAvatar = BuildAvatar(0, "")

	uurl, err = url.Parse(Config.AvatarResBase)
	if err != nil {
		return errors.Wrap(err, "failed to parse Config.AvatarResBase: ")
	}
	host2 := uurl.Hostname()
	if host != host2 && !local(host) {
		Config.ExtraCSPOrigins += " " + host
		Config.RefNoRef = true // Avoid leaking origin data to the CDN
	}
	if Config.AvatarResBase == "" {
		Config.AvatarResBase = "/uploads/"
	}
	if Config.HashAlgo != "" {
		// TODO: Set the alternate hash algo, e.g. argon2
	}

	if !Config.DisableDefaultNoavatar {
		cap := 11
		noavatarCache200 = make([]string, cap)
		noavatarCache48 = make([]string, cap)
		/*noavatarCache200Jpg = make([]string, cap)
		noavatarCache48Jpg = make([]string, cap)
		noavatarCache200Avif = make([]string, cap)
		noavatarCache48Avif = make([]string, cap)*/
		for i := 0; i < cap; i++ {
			noavatarCache200[i] = StaticFiles.Prefix + "n" + strconv.Itoa(i) + "-" + strconv.Itoa(200) + ".png?i=0"
			noavatarCache48[i] = StaticFiles.Prefix + "n" + strconv.Itoa(i) + "-" + strconv.Itoa(48) + ".png?i=0"

			/*noavatarCache200Jpg[i] = StaticFiles.Prefix + "n" + strconv.Itoa(i) + "-" + strconv.Itoa(200) + ".jpg?i=0"
			noavatarCache48Jpg[i] = StaticFiles.Prefix + "n" + strconv.Itoa(i) + "-" + strconv.Itoa(48) + ".jpg?i=0"

			noavatarCache200Avif[i] = StaticFiles.Prefix + "n" + strconv.Itoa(i) + "-" + strconv.Itoa(200) + ".avif?i=0"
			noavatarCache48Avif[i] = StaticFiles.Prefix + "n" + strconv.Itoa(i) + "-" + strconv.Itoa(48) + ".avif?i=0"*/
		}
	}
	Config.Noavatar = strings.Replace(Config.Noavatar, "{site_url}", Site.URL, -1)
	guestAvatar = GuestAvatar{buildNoavatar(0, 200), buildNoavatar(0, 48)}

	if Config.DisableIP {
		Config.DisableLastIP = true
		Config.DisablePostIP = true
		Config.DisablePollIP = true
	}

	if Config.PostIPCutoff == 0 {
		Config.PostIPCutoff = 90 // Default cutoff
	}
	if Config.LogPruneCutoff == 0 {
		Config.LogPruneCutoff = 180 // Default cutoff
	}
	if Config.LastIPCutoff == 0 {
		Config.LastIPCutoff = 3 // Default cutoff
	}
	if Config.LastIPCutoff > 12 {
		Config.LastIPCutoff = 12
	}
	if Config.PollIPCutoff == 0 {
		Config.PollIPCutoff = 90 // Default cutoff
	}
	if Config.NoEmbed {
		DefaultParseSettings.NoEmbed = true
	}

	// ? Find a way of making these unlimited if zero? It might rule out some optimisations, waste memory, and break layouts
	if Config.MaxTopicTitleLength == 0 {
		Config.MaxTopicTitleLength = 100
	}
	if Config.MaxUsernameLength == 0 {
		Config.MaxUsernameLength = 100
	}
	GuestUser.Avatar, GuestUser.MicroAvatar = BuildAvatar(0, "")

	if Config.HashAlgo != "" {
		// TODO: Set the alternate hash algo, e.g. argon2
	}

	if Config.LogDir == "" {
		Config.LogDir = "./logs/"
	}

	// We need this in here rather than verifyConfig as switchToTestDB() currently overwrites the values it verifies
	if DbConfig.TestDbname == DbConfig.Dbname {
		return errors.New("Your test database can't have the same name as your production database")
	}
	if Dev.TestDB {
		SwitchToTestDB()
	}
	return nil
	// We need this in here rather than verifyConfig as switchToTestDB() currently overwrites the values it verifies
	if DbConfig.TestDbname == DbConfig.Dbname {
		return errors.New("Your test database can't have the same name as your production database")
	}
	if Dev.TestDB {
		SwitchToTestDB()
	}
	return nil
}

func VerifyConfig() (err error) {
	switch {
	case !Forums.Exists(Config.DefaultForum):
		err = errors.New("Invalid default forum")
	case Config.ServerCount < 1:
		err = errors.New("You can't have less than one server")
	case Config.MaxTopicTitleLength > 100:
		err = errors.New("The max topic title length cannot be over 100 as that's unable to fit in the database row")
	case Config.MaxUsernameLength > 100:
		err = errors.New("The max username length cannot be over 100 as that's unable to fit in the database row")
	}
	return err
func VerifyConfig() error {
	if !Forums.Exists(Config.DefaultForum) {
		return errors.New("Invalid default forum")
	}
	if Config.ServerCount < 1 {
		return errors.New("You can't have less than one server")
	}
	if Config.MaxTopicTitleLength > 100 {
		return errors.New("The max topic title length cannot be over 100 as that's unable to fit in the database row")
	}
	if Config.MaxUsernameLength > 100 {
		return errors.New("The max username length cannot be over 100 as that's unable to fit in the database row")
	}
	return nil
}

func SwitchToTestDB() {
	DbConfig.Host = DbConfig.TestHost
	DbConfig.Username = DbConfig.TestUsername
	DbConfig.Password = DbConfig.TestPassword
	DbConfig.Dbname = DbConfig.TestDbname
	DbConfig.Port = DbConfig.TestPort
	DbConfig.Host = DbConfig.TestHost
	DbConfig.Username = DbConfig.TestUsername
	DbConfig.Password = DbConfig.TestPassword
	DbConfig.Dbname = DbConfig.TestDbname
	DbConfig.Port = DbConfig.TestPort
}

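To make the MaxRequestSizeStr handling in ProcessConfig above concrete: a plain integer such as "1048576" is accepted as bytes by the first strconv.Atoi, while a value like "5mb" fails that parse, is split into the quantity 5 and the two-character unit "mb", and is then expanded to bytes by FriendlyUnitToBytes. The sketch below reproduces that flow with an assumed unit table; the real FriendlyUnitToBytes lives elsewhere in the codebase and may recognise different units.

package main

import (
	"errors"
	"fmt"
	"strconv"
)

// parseRequestSize mimics the split performed in ProcessConfig: bare bytes,
// or an integer quantity followed by a two-character unit.
func parseRequestSize(s string) (int, error) {
	if n, err := strconv.Atoi(s); err == nil {
		return n, nil // already plain bytes
	}
	if len(s) < 3 {
		return 0, errors.New("invalid unit for the request size")
	}
	quantity, err := strconv.Atoi(s[:len(s)-2])
	if err != nil {
		return 0, errors.New("unable to convert the quantity to an integer")
	}
	// Assumed unit table standing in for FriendlyUnitToBytes.
	units := map[string]int{"kb": 1024, "mb": 1024 * 1024, "gb": 1024 * 1024 * 1024}
	mult, ok := units[s[len(s)-2:]]
	if !ok {
		return 0, errors.New("unrecognised unit")
	}
	return quantity * mult, nil
}

func main() {
	fmt.Println(parseRequestSize("5mb"))     // 5242880 <nil>
	fmt.Println(parseRequestSize("1048576")) // 1048576 <nil>
}
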
Some files were not shown because too many files have changed in this diff.