diff --git a/DEPENDENCIES_MANUAL_INSTALL.md b/DEPENDENCIES_MANUAL_INSTALL.md index 3acec7c554..85acf61344 100644 --- a/DEPENDENCIES_MANUAL_INSTALL.md +++ b/DEPENDENCIES_MANUAL_INSTALL.md @@ -44,7 +44,7 @@ No setup is required for Redis or Elasticsearch. However, it is necessary to perform some initialization for PostgreSQL and import the latest BookBrainz database dump. -Firstly, begin downloading the [latest BookBrainz dump](http://ftp.musicbrainz.org/pub/musicbrainz/bookbrainz/latest.sql.bz2). +First, begin downloading the [latest BookBrainz dump](http://ftp.musicbrainz.org/pub/musicbrainz/bookbrainz/latest.sql.bz2). Then, uncompress the `latest.sql.bz2` file, using the bzip2 command: diff --git a/INSTALLATION_TROUBLESHOOTING.md b/INSTALLATION_TROUBLESHOOTING.md index 2607b2d2c1..23940b1d01 100644 --- a/INSTALLATION_TROUBLESHOOTING.md +++ b/INSTALLATION_TROUBLESHOOTING.md @@ -1,19 +1,19 @@ -# Installation Troubleshooting (along with some tips and tricks) +# Installation Troubleshooting (along with some tips and tricks) * General - 1. It's better for you if you do some package catalog update by + 1. It's better for you if you do some package catalog update by `sudo apt update` - 2. Error: `Can't open input file latest.sql.bz2: No such file or directory` - After downloading the data dumps, you may realize that an attempt to uncompress it using the command `bzip2 -d latest.sql.bz2` doesn’t work and gives the above error. + 2. Error: `Can't open input file latest.sql.bz2: No such file or directory` + After downloading the data dumps, you may realize that an attempt to uncompress it using the command `bzip2 -d latest.sql.bz2` doesn’t work and gives the above error. It can be solved by giving the actual path of the latest.sql.bz2 file in place of the file name such as: - `/ home/user/Desktop/latest.sql.bz2` + `/home/user/Desktop/latest.sql.bz2` - 3. 
Error: `fatal: unable to access 'https://github.com/path/to/repo.git/': gnutls_handshake() failed: Error in the pull function` after entering the `git clone --recursive https://github.com/bookbrainz/bookbrainz-site.git` command. + 3. Error: `fatal: unable to access 'https://github.com/path/to/repo.git/': gnutls_handshake() failed: Error in the pull function` after entering the `git clone --recursive https://github.com/bookbrainz/bookbrainz-site.git` command. At this point, you should check your internet connection. If it persists, make sure you are not working behind a proxy. 4. There are no css styles! My local page does not look like bookbrainz.org at all ! @@ -21,7 +21,7 @@ At this point, you should check your internet connection. If it persists, make s * ElasticSearch - 1. ElasticSearch requires runtime Java installed on your local machine, + 1. ElasticSearch requires runtime Java installed on your local machine, so you have to install it by For ubuntu users @@ -32,46 +32,46 @@ At this point, you should check your internet connection. If it persists, make s `java -version` - 2. When you run ElasticSearch, it seems that the process takes a very long time. + 2. When you run ElasticSearch, it seems that the process takes a very long time. To proceed the process, just let ElasticSearch to run on its own terminal, and proceed the building process by making another window of terminal - 3. If you run into an error on Docker Toolbox with Elastic Search stating an error message along the lines of: + 3. If you run into an error on Docker Toolbox with Elastic Search stating an error message along the lines of: - `Waiting for elasticsearch:9200 .elasticsearch: forward host lookup failed: Unknown host` + `Waiting for elasticsearch:9200 .elasticsearch: forward host lookup failed: Unknown host` - The cause could be the docker-machine's memory limits. you can inspect this with the command: + The cause could be the docker-machine's memory limits. 
you can inspect this with the command: - `docker-machine inspect machine-name` + `docker-machine inspect machine-name` - To diagnose this problem, try taking a look at the logs with the command: + To diagnose this problem, try taking a look at the logs with the command: - `docker-compose logs elasticsearch` + `docker-compose logs elasticsearch` - And if you see an error within the logs along the lines of: + And if you see an error within the logs along the lines of: ``` - # There is insufficient memory for the Java Runtime Environment to continue. + # There is insufficient memory for the Java Runtime Environment to continue. # Native memory allocation (mmap) failed to map 2060255232 bytes for committing reserved memory. ``` Please try recreating the default docker machine by: - i. Remove default docker-machine with the command: + i. Remove default docker-machine with the command: - `docker-machine rm default` + `docker-machine rm default` - ii. Create a new default machine with the command: + ii. Create a new default machine with the command: ``` docker-machine create -d virtualbox --virtualbox-cpu-count=2 --virtualbox-memory=4096 --virtualbox-disk-size=50000 default - ``` - iii. Restart your docker environment with the commands: + ``` + iii. Restart your docker environment with the commands: ``` docker-machine stop exit - ``` + ``` 4. To check if port is already is in use or not run `netstat -anp tcp | grep ` @@ -85,7 +85,7 @@ At this point, you should check your internet connection. If it persists, make s `/etc/init.d/redis-server stop` 2. Sometimes the port 6379 on which redis server runs is used by TCP. So to terminate this process run - `sudo kill sudo 'lsof -t -i:5432'` + `sudo kill sudo 'lsof -t -i:5432'` * PostgreSQL @@ -94,7 +94,7 @@ At this point, you should check your internet connection. If it persists, make s `sudo -u postgres psql` - then + then ``` psql (12.3) Type "help" for help. 
@@ -109,7 +109,7 @@ At this point, you should check your internet connection. If it persists, make s
 
 then
 
-`Password for user : ` 
+`Password for user : `
 
 will appear. Use the username for the config later on config.json.
 
diff --git a/scripts/clean-public-collection-dump-tables.sql b/scripts/clean-public-collection-dump-tables.sql
new file mode 100755
index 0000000000..92dd65049d
--- /dev/null
+++ b/scripts/clean-public-collection-dump-tables.sql
@@ -0,0 +1,4 @@
+-- These temporary tables were created to dump only the public collections
+DROP TABLE tmp_public_user_collection CASCADE;
+DROP TABLE tmp_public_user_collection_item CASCADE;
+DROP TABLE tmp_public_user_collection_collaborator CASCADE;
\ No newline at end of file
diff --git a/scripts/create-dumps.sh b/scripts/create-dumps.sh
index 0c3ce6961a..c608ac1f89 100755
--- a/scripts/create-dumps.sh
+++ b/scripts/create-dumps.sh
@@ -8,19 +8,67 @@ source /home/bookbrainz/bookbrainz-site/scripts/config.sh
 
 pushd /home/bookbrainz/data/dumps
 
+PRIVATE_DUMP_FILE=bookbrainz-full-dump-`date -I`.sql
 DUMP_FILE=bookbrainz-dump-`date -I`.sql
+COLLECTIONS_DUMP_FILE=bookbrainz-collections-dump-`date -I`.sql
+BACKUP_FOLDER=/home/bookbrainz/backup
 
-echo "Creating data dump..."
+echo "Creating private data dump..."
+# Dump new backup to /tmp
+pg_dump\
+	-h $POSTGRES_HOST \
+	-p $POSTGRES_PORT \
+	-U bookbrainz \
+	--serializable-deferrable\
+	bookbrainz > /tmp/$PRIVATE_DUMP_FILE
+echo "Main private dump created!"
+echo "Creating public data dump..."
 
 # Dump new backup to /tmp
 pg_dump\
 	-h $POSTGRES_HOST \
 	-p $POSTGRES_PORT \
 	-U bookbrainz \
 	-T _editor_entity_visits\
+	-T user_collection\
+	-T user_collection_collaborator\
+	-T user_collection_item\
 	--serializable-deferrable\
 	bookbrainz > /tmp/$DUMP_FILE
-echo "Dump created!"
+echo "Main public dump created!"
+
+echo "Creating public collections dump..."
+# Create tables with public collections and items
+psql -h $POSTGRES_HOST \
+	-p $POSTGRES_PORT \
+	-U bookbrainz \
+	-d bookbrainz \
+	< /home/bookbrainz/bookbrainz-site/scripts/create-public-collection-dumps.sql
+
+# Dump public collections backup to /tmp
+pg_dump\
+	-h $POSTGRES_HOST \
+	-p $POSTGRES_PORT \
+	-U bookbrainz \
+	-t tmp_public_user_collection\
+	-t tmp_public_user_collection_item\
+	-t tmp_public_user_collection_collaborator\
+	--serializable-deferrable\
+	bookbrainz \
+	> /tmp/$COLLECTIONS_DUMP_FILE
+echo "Public collections dump created!"
+
+echo "Concatenating dump files"
+sed 's/tmp_public_user_collection/user_collection/g' /tmp/$COLLECTIONS_DUMP_FILE >> /tmp/$DUMP_FILE # match the full tmp_public_ prefix, otherwise restored tables keep a stray tmp_ prefix
+
+echo "Cleaning up temporary public collections tables"
+psql\
+	-h $POSTGRES_HOST \
+	-p $POSTGRES_PORT \
+	-U bookbrainz \
+	bookbrainz < /home/bookbrainz/bookbrainz-site/scripts/clean-public-collection-dump-tables.sql
+rm /tmp/$COLLECTIONS_DUMP_FILE
+echo "Temporary public collections tables removed"
 
 # Compress new backup and move to dump dir
 echo "Compressing..."
@@ -29,10 +77,15 @@ bzip2 /tmp/$DUMP_FILE
 mv /tmp/$DUMP_FILE.bz2 .
 
 echo "Compressed!"
 
+echo "Compressing and moving full private dump"
+bzip2 /tmp/$PRIVATE_DUMP_FILE
+mv /tmp/$PRIVATE_DUMP_FILE.bz2 $BACKUP_FOLDER
+echo "Full private dump compressed and moved"
+
 echo "Removing old dumps..."
 rm -f /tmp/*.sql
 
-# Remove backups older than 8 days
-find ./ -name '*.sql.bz2' -type f -mtime +7 -print | xargs /bin/rm -f
+# Remove backups older than 8 days both in this directory and the full dump backup directory
+find ./ $BACKUP_FOLDER -name '*.sql.bz2' -type f -mtime +7 -print | xargs /bin/rm -f
 echo "Done!"
rm -f latest.sql.bz2 diff --git a/scripts/create-public-collection-dumps.sql b/scripts/create-public-collection-dumps.sql new file mode 100644 index 0000000000..a42a92628e --- /dev/null +++ b/scripts/create-public-collection-dumps.sql @@ -0,0 +1,36 @@ +BEGIN TRANSACTION; + +-- duplicate user_collection table with public collections only + +CREATE table if not exists tmp_public_user_collection (LIKE bookbrainz.user_collection INCLUDING ALL); +ALTER TABLE bookbrainz.tmp_public_user_collection + ADD CONSTRAINT tmp_public_user_collection_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES editor(id); + +INSERT INTO tmp_public_user_collection + select * from user_collection uc where uc.public is true; + +-- duplicate user_collection_item table with public collections' items only + +CREATE table if not exists tmp_public_user_collection_item (LIKE bookbrainz.user_collection_item INCLUDING ALL); +ALTER TABLE bookbrainz.tmp_public_user_collection_item + ADD CONSTRAINT tmp_public_user_collection_item_bbid_fkey FOREIGN KEY (bbid) REFERENCES entity(bbid); + +ALTER TABLE bookbrainz.tmp_public_user_collection_item + ADD CONSTRAINT tmp_public_user_collection_item_collection_id_fkey FOREIGN KEY (collection_id) REFERENCES bookbrainz.tmp_public_user_collection(id) ON DELETE CASCADE; + + +INSERT INTO tmp_public_user_collection_item + select uci.* from user_collection_item uci right join tmp_public_user_collection on uci.collection_id = tmp_public_user_collection.id; + +-- duplicate user_collection_collaborator table with public collections' collaborators only + +CREATE table if not exists tmp_public_user_collection_collaborator (LIKE bookbrainz.user_collection_collaborator INCLUDING ALL); +ALTER TABLE bookbrainz.tmp_public_user_collection_collaborator + ADD CONSTRAINT tmp_public_user_collection_collaborator_collaborator_id_fkey FOREIGN KEY (collaborator_id) REFERENCES editor(id); +ALTER TABLE bookbrainz.tmp_public_user_collection_collaborator + ADD CONSTRAINT 
tmp_public_user_collection_collaborator_collection_id_fkey FOREIGN KEY (collection_id) REFERENCES bookbrainz.tmp_public_user_collection(id) ON DELETE CASCADE; + +INSERT INTO tmp_public_user_collection_collaborator + select ucc.* from user_collection_collaborator ucc inner join tmp_public_user_collection on ucc.collection_id = tmp_public_user_collection.id; + +COMMIT; \ No newline at end of file diff --git a/src/client/components/forms/registration-details.js b/src/client/components/forms/registration-details.js index 845a8126d5..1386f3e692 100644 --- a/src/client/components/forms/registration-details.js +++ b/src/client/components/forms/registration-details.js @@ -113,7 +113,7 @@ class RegistrationForm extends React.Component { onSubmit={this.handleSubmit} >

- Firstly, please check that your display + First, please check that your display name is correct. This is the name that other editors will get to know you by.