From ed9860273d3db604c103e978c958e7158b3d8fd5 Mon Sep 17 00:00:00 2001 From: Westley Date: Fri, 15 Sep 2023 00:11:23 +1000 Subject: [PATCH 1/4] Attempted to integrate sqitch into the dockerfiles and ported our sql files into it, along with scripts to interact with sqitch, no idea if it works. --- backend/Dockerfile | 28 +- backend/docs/WritingTests.md | 254 ++-- backend/endpoints/tests/volume_test.go | 224 ++-- docker-compose.yml | 186 +-- frontend/.dockerignore | 14 +- frontend/Dockerfile | 54 +- .../CreateCodeBlock-Styled.tsx | 0 .../CreateCodeBlock.stories.tsx | 0 .../CreateCodeBlock.tsx | 0 .../index.ts | 2 +- .../src/cse-ui-kit/assets/bold-button.svg | 32 +- .../cse-ui-kit/assets/centeralign-button.svg | 110 +- .../src/cse-ui-kit/assets/italics-button.svg | 80 +- .../assets/leftrightalign-button.svg | 110 +- .../cse-ui-kit/assets/underline-button.svg | 96 +- .../src/cse-ui-kit/assets/upload-content.svg | 100 +- sqitch/Dockerfile | 1 + sqitch/deploy/01-create_migration_table.sql | 18 + sqitch/deploy/02-create_frontend_table.sql | 12 + sqitch/deploy/03-create_groups_table.sql | 18 + sqitch/deploy/04-create_person_table.sql | 38 + sqitch/deploy/05-create_filesystem_table.sql | 93 ++ sqitch/deploy/06-create_dummy_data.sql | 68 + sqitch/revert/01-create_migration_table.sql | 8 + sqitch/revert/02-create_frontend_table.sql | 8 + sqitch/revert/03-create_groups_table.sql | 9 + sqitch/revert/04-create_person_table.sql | 11 + sqitch/revert/05-create_filesystem_table.sql | 11 + sqitch/revert/06-create_dummy_data.sql | 7 + sqitch/sqitch.bat | 51 + sqitch/sqitch.conf | 10 + sqitch/sqitch.plan | 9 + sqitch/sqitch.sh | 68 + sqitch/verify/01-create_migration_table.sql | 7 + sqitch/verify/02-create_frontend_table.sql | 7 + sqitch/verify/03-create_groups_table.sql | 7 + sqitch/verify/04-create_person_table.sql | 7 + sqitch/verify/05-create_filesystem_table.sql | 7 + sqitch/verify/06-create_dummy_data.sql | 7 + utilities/createUsers.sh | 0 .../.paket/Paket.Restore.targets | 1114 
++++++++--------- utilities/ghost-exporter/Main.fs | 48 +- 42 files changed, 1712 insertions(+), 1222 deletions(-) rename frontend/src/cse-ui-kit/{CreateCodeBlock_button => CreateCodeBlock_button}/CreateCodeBlock-Styled.tsx (100%) rename frontend/src/cse-ui-kit/{CreateCodeBlock_button => CreateCodeBlock_button}/CreateCodeBlock.stories.tsx (100%) rename frontend/src/cse-ui-kit/{CreateCodeBlock_button => CreateCodeBlock_button}/CreateCodeBlock.tsx (100%) rename frontend/src/cse-ui-kit/{CreateCodeBlock_button => CreateCodeBlock_button}/index.ts (60%) create mode 100644 sqitch/Dockerfile create mode 100644 sqitch/deploy/01-create_migration_table.sql create mode 100644 sqitch/deploy/02-create_frontend_table.sql create mode 100644 sqitch/deploy/03-create_groups_table.sql create mode 100644 sqitch/deploy/04-create_person_table.sql create mode 100644 sqitch/deploy/05-create_filesystem_table.sql create mode 100644 sqitch/deploy/06-create_dummy_data.sql create mode 100644 sqitch/revert/01-create_migration_table.sql create mode 100644 sqitch/revert/02-create_frontend_table.sql create mode 100644 sqitch/revert/03-create_groups_table.sql create mode 100644 sqitch/revert/04-create_person_table.sql create mode 100644 sqitch/revert/05-create_filesystem_table.sql create mode 100644 sqitch/revert/06-create_dummy_data.sql create mode 100644 sqitch/sqitch.bat create mode 100644 sqitch/sqitch.conf create mode 100644 sqitch/sqitch.plan create mode 100644 sqitch/sqitch.sh create mode 100644 sqitch/verify/01-create_migration_table.sql create mode 100644 sqitch/verify/02-create_frontend_table.sql create mode 100644 sqitch/verify/03-create_groups_table.sql create mode 100644 sqitch/verify/04-create_person_table.sql create mode 100644 sqitch/verify/05-create_filesystem_table.sql create mode 100644 sqitch/verify/06-create_dummy_data.sql mode change 100755 => 100644 utilities/createUsers.sh diff --git a/backend/Dockerfile b/backend/Dockerfile index 0197710ea..09258ea23 100644 --- 
a/backend/Dockerfile +++ b/backend/Dockerfile @@ -1,15 +1,15 @@ -FROM golang:1.19-alpine as app-builder -WORKDIR /go/src/app -COPY . . -RUN apk add git -# Static build required so that we can safely copy the binary over. -# `-tags timetzdata` embeds zone info from the "time/tzdata" package. -RUN CGO_ENABLED=0 go install -ldflags '-extldflags "-static"' -tags timetzdata - -FROM scratch -# the test program: -COPY --from=app-builder /go/bin/cms.csesoc.unsw.edu.au /cms.csesoc.unsw.edu.au -# the tls certificates: -# NB: this pulls directly from the upstream image, which already has ca-certificates: -COPY --from=alpine:latest /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ +FROM golang:1.19-alpine as app-builder +WORKDIR /go/src/app +COPY . . +RUN apk add git +# Static build required so that we can safely copy the binary over. +# `-tags timetzdata` embeds zone info from the "time/tzdata" package. +RUN CGO_ENABLED=0 go install -ldflags '-extldflags "-static"' -tags timetzdata + +FROM scratch +# the test program: +COPY --from=app-builder /go/bin/cms.csesoc.unsw.edu.au /cms.csesoc.unsw.edu.au +# the tls certificates: +# NB: this pulls directly from the upstream image, which already has ca-certificates: +COPY --from=alpine:latest /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ ENTRYPOINT ["/cms.csesoc.unsw.edu.au"] \ No newline at end of file diff --git a/backend/docs/WritingTests.md b/backend/docs/WritingTests.md index d24e6e16a..e48c1989e 100644 --- a/backend/docs/WritingTests.md +++ b/backend/docs/WritingTests.md @@ -1,128 +1,128 @@ -# Writing Tests -The following is just a small guide on the process of unit testing within our codebase. Most if not all of our unit tests are written using a combination of `gomock` (Go's mocking framework) and Go's inbuilt testing engine `go test`. - -## Using Go Test -Go has a really nice and clean testing library in the form of `testing`. 
Testing is usually done on a package/file level, that is for each file/package we have an appropriate set of tests. To mark a file as a test file we simply add the suffix `_test` to the end of it, eg. `concurrency.go` would have the corresponding test file `concurrency_test.go`. Generally test files look something like -```go -package math - -import ( - "testing" - // a nice library we use to make assertations a lot cleaner - "github.com/stretchr/testify/assert" -) - -// Tests must be marked with the "Test" prefix, this tells go that the following method is a test -// under the hood what actually happens is that go test is a code generation tool, this code generation tool generates a -// main function that invokes all methods starting with "Test", it then compiles and runs this generated file -func TestAdd(t *testing.T) { - // The normal Go way - result := 1 + 2 - if result != 3 { - t.ErrorF("1 + 2 wasnt 3 :O") - } - - // Our way of writing this test - assert := assert.New(t) - assert.Equal(1 + 2, 3) -} -``` -Once you've written your test it can be run with: -```sh -# To run specifically this test -go test myTest_test.go -# To run all tests -go test ./... -``` - -## Project Specific Quirks -Theres some weird quirks when it comes to writing project specific tests, this includes stuff such as interface mocking and writing database bound tests. - -### Database Bound Tests -Generally it is preffered if your tests do not touch an actual database (see the section on mocking for how to acheive this) but sometimes it is just unavoidable, eg. you may be writing a test in the `repository` package which is inherently database bound. To allow you to write such tests we require that database-bound tests are wrapped in a transaction, luckilly there is a convenient package (+ some really hacky code that should be refactored someday) that both enforces that requirement and helps you acheive it. 
- -#### Testing Contexts -We write database bound tests by using a `testing context`, a testing context refers to a connection to any database, this can be the live CMS database, your local version or a test database you spun up for testing. All contexts implement the [context interface](https://github.com/csesoc/website/blob/main/backend/database/contexts/main.go#L25), all database queries should be made via the context interface. - -There are two main implementations of the context interface, those are the `liveContext` and the `TestingContext`, the `liveContext` will panic whenever it is used within a test, so when writing tests make sure your pass a `testingContext` as an argument. The implementation that `TestingContext` exposes wraps every SQL query in a transaction and rolls it back upon completion, this gives us complete isolation between unit tests. We can write a simple database bound test as follows -```go -func TestRootInsert(t *testing.T) { - testContext := database.GetDatabaseContext().(*contexts.TestingContext) - - // to start a transaction (a test) we have to wrap the test in the runTest function, if we do not run our tests - // via this function then the context panics whenever you try and send a query (for good reason :P) - testContext.RunTest(func() { - // your test here - }) - - // after your test finishes the context rollsback the constructed transaction -} -``` - -As a side-note, don't go looking around the database package, its a bit of a mess 😔, I'll have a refactoring ticket created some day to just clean up that package. - -### Interface Mocking -As should have been rather evident from the last section, writing database bounds tests can be a bit of a pain, we need to wrap our tests in a transaction to try and ensure complete test isolation. There is a potential workaround however and that is via writing better tests 😛. 
Consider a simple endpoint function that we wish to unit test, this endpoint will depend on: - - A database connection - - To access the Filesystem, Groups and Users tables - - A connection to our published/unpublished volumes - - A log - -When writing tests for this endpoint the naive thing to do would be to create a testing context, spin up a connection to the published/unpublished volumes and pass that into your function and finally assert that it did what you wanted it to. The issue with this approach is that now you're not just testing your singular function but your ENTIRE system which leads to really slow tests and makes refactoring a pain (since you may have to update several unrelated tests). The smarter approach would be to use a method known as interface mocking, the idea behind interface mocking is that we provide a fake implementation of our interfaces to our functions, we then use that fake implementation to asser that the function did exactly what we wanted it to do. Within the CMS we use `gomock` to generate interface mocks, a general guide on interface mocking and `gomock` can be found [here](https://thedevelopercafe.com/articles/mocking-interfaces-in-go-with-gomock-670b1a640b00). Generally when writing tests the only mocks that will be of importance to you are the `dependency factory` and `repository` mocks, these can be found [here](https://github.com/csesoc/website/blob/main/backend/database/repositories/mocks/models_mock.go), to use them simply import that package into your file. 
- -An example of how we use interface mocking in practice can be seen below: -```go -func TestValidEntityInfo(t *testing.T) { - controller := gomock.NewController(t) - assert := assert.New(t) - defer controller.Finish() - - // ==== test setup ===== - entityID := uuid.New() - - // Constructing a fake filesystem repository mock - // note that we feed it a "fake" implementation, we're basically saying that whenever this fake function is called - // return this fake data we set it up with - mockFileRepo := repMocks.NewMockIFilesystemRepository(controller) - mockFileRepo.EXPECT().GetEntryWithID(entityID).Return(repositories.FilesystemEntry{ - EntityID: entityID, - LogicalName: "random name", - IsDocument: false, - ParentFileID: repositories.FilesystemRootID, - ChildrenIDs: []uuid.UUID{}, - }, nil).Times(1) - - // creates a dependecy factory mock - mockDepFactory := createMockDependencyFactory(controller, mockFileRepo, true) - - // ==== test execution ===== - form := models.ValidInfoRequest{EntityID: entityID} - response := endpoints.GetEntityInfo(form, mockDepFactory) - - assert.Equal(response.Status, http.StatusOK) - // and notice down here how we're asserting that the fake data was created - assert.Equal(response.Response, models.EntityInfoResponse{ - EntityID: entityID, - EntityName: "random name", - IsDocument: false, - Parent: repositories.FilesystemRootID, - Children: []models.EntityInfoResponse{}, - }) -} - - -// createMockDependencyFactory just constructs an instance of a dependency factory mock -func createMockDependencyFactory(controller *gomock.Controller, mockFileRepo *repMocks.MockIFilesystemRepository, needsLogger bool) *mocks.MockDependencyFactory { - mockDepFactory := mocks.NewMockDependencyFactory(controller) - mockDepFactory.EXPECT().GetFilesystemRepo().Return(mockFileRepo) - - if needsLogger { - log := logger.OpenLog("new log") - mockDepFactory.EXPECT().GetLogger().Return(log) - } - - return mockDepFactory -} -``` - +# Writing Tests +The following is 
just a small guide on the process of unit testing within our codebase. Most if not all of our unit tests are written using a combination of `gomock` (Go's mocking framework) and Go's inbuilt testing engine `go test`. + +## Using Go Test +Go has a really nice and clean testing library in the form of `testing`. Testing is usually done on a package/file level, that is for each file/package we have an appropriate set of tests. To mark a file as a test file we simply add the suffix `_test` to the end of it, eg. `concurrency.go` would have the corresponding test file `concurrency_test.go`. Generally test files look something like +```go +package math + +import ( + "testing" + // a nice library we use to make assertations a lot cleaner + "github.com/stretchr/testify/assert" +) + +// Tests must be marked with the "Test" prefix, this tells go that the following method is a test +// under the hood what actually happens is that go test is a code generation tool, this code generation tool generates a +// main function that invokes all methods starting with "Test", it then compiles and runs this generated file +func TestAdd(t *testing.T) { + // The normal Go way + result := 1 + 2 + if result != 3 { + t.ErrorF("1 + 2 wasnt 3 :O") + } + + // Our way of writing this test + assert := assert.New(t) + assert.Equal(1 + 2, 3) +} +``` +Once you've written your test it can be run with: +```sh +# To run specifically this test +go test myTest_test.go +# To run all tests +go test ./... +``` + +## Project Specific Quirks +Theres some weird quirks when it comes to writing project specific tests, this includes stuff such as interface mocking and writing database bound tests. + +### Database Bound Tests +Generally it is preffered if your tests do not touch an actual database (see the section on mocking for how to acheive this) but sometimes it is just unavoidable, eg. you may be writing a test in the `repository` package which is inherently database bound. 
To allow you to write such tests we require that database-bound tests are wrapped in a transaction, luckilly there is a convenient package (+ some really hacky code that should be refactored someday) that both enforces that requirement and helps you acheive it. + +#### Testing Contexts +We write database bound tests by using a `testing context`, a testing context refers to a connection to any database, this can be the live CMS database, your local version or a test database you spun up for testing. All contexts implement the [context interface](https://github.com/csesoc/website/blob/main/backend/database/contexts/main.go#L25), all database queries should be made via the context interface. + +There are two main implementations of the context interface, those are the `liveContext` and the `TestingContext`, the `liveContext` will panic whenever it is used within a test, so when writing tests make sure your pass a `testingContext` as an argument. The implementation that `TestingContext` exposes wraps every SQL query in a transaction and rolls it back upon completion, this gives us complete isolation between unit tests. We can write a simple database bound test as follows +```go +func TestRootInsert(t *testing.T) { + testContext := database.GetDatabaseContext().(*contexts.TestingContext) + + // to start a transaction (a test) we have to wrap the test in the runTest function, if we do not run our tests + // via this function then the context panics whenever you try and send a query (for good reason :P) + testContext.RunTest(func() { + // your test here + }) + + // after your test finishes the context rollsback the constructed transaction +} +``` + +As a side-note, don't go looking around the database package, its a bit of a mess 😔, I'll have a refactoring ticket created some day to just clean up that package. 
+ +### Interface Mocking +As should have been rather evident from the last section, writing database bounds tests can be a bit of a pain, we need to wrap our tests in a transaction to try and ensure complete test isolation. There is a potential workaround however and that is via writing better tests 😛. Consider a simple endpoint function that we wish to unit test, this endpoint will depend on: + - A database connection + - To access the Filesystem, Groups and Users tables + - A connection to our published/unpublished volumes + - A log + +When writing tests for this endpoint the naive thing to do would be to create a testing context, spin up a connection to the published/unpublished volumes and pass that into your function and finally assert that it did what you wanted it to. The issue with this approach is that now you're not just testing your singular function but your ENTIRE system which leads to really slow tests and makes refactoring a pain (since you may have to update several unrelated tests). The smarter approach would be to use a method known as interface mocking, the idea behind interface mocking is that we provide a fake implementation of our interfaces to our functions, we then use that fake implementation to asser that the function did exactly what we wanted it to do. Within the CMS we use `gomock` to generate interface mocks, a general guide on interface mocking and `gomock` can be found [here](https://thedevelopercafe.com/articles/mocking-interfaces-in-go-with-gomock-670b1a640b00). Generally when writing tests the only mocks that will be of importance to you are the `dependency factory` and `repository` mocks, these can be found [here](https://github.com/csesoc/website/blob/main/backend/database/repositories/mocks/models_mock.go), to use them simply import that package into your file. 
+ +An example of how we use interface mocking in practice can be seen below: +```go +func TestValidEntityInfo(t *testing.T) { + controller := gomock.NewController(t) + assert := assert.New(t) + defer controller.Finish() + + // ==== test setup ===== + entityID := uuid.New() + + // Constructing a fake filesystem repository mock + // note that we feed it a "fake" implementation, we're basically saying that whenever this fake function is called + // return this fake data we set it up with + mockFileRepo := repMocks.NewMockIFilesystemRepository(controller) + mockFileRepo.EXPECT().GetEntryWithID(entityID).Return(repositories.FilesystemEntry{ + EntityID: entityID, + LogicalName: "random name", + IsDocument: false, + ParentFileID: repositories.FilesystemRootID, + ChildrenIDs: []uuid.UUID{}, + }, nil).Times(1) + + // creates a dependecy factory mock + mockDepFactory := createMockDependencyFactory(controller, mockFileRepo, true) + + // ==== test execution ===== + form := models.ValidInfoRequest{EntityID: entityID} + response := endpoints.GetEntityInfo(form, mockDepFactory) + + assert.Equal(response.Status, http.StatusOK) + // and notice down here how we're asserting that the fake data was created + assert.Equal(response.Response, models.EntityInfoResponse{ + EntityID: entityID, + EntityName: "random name", + IsDocument: false, + Parent: repositories.FilesystemRootID, + Children: []models.EntityInfoResponse{}, + }) +} + + +// createMockDependencyFactory just constructs an instance of a dependency factory mock +func createMockDependencyFactory(controller *gomock.Controller, mockFileRepo *repMocks.MockIFilesystemRepository, needsLogger bool) *mocks.MockDependencyFactory { + mockDepFactory := mocks.NewMockDependencyFactory(controller) + mockDepFactory.EXPECT().GetFilesystemRepo().Return(mockFileRepo) + + if needsLogger { + log := logger.OpenLog("new log") + mockDepFactory.EXPECT().GetLogger().Return(log) + } + + return mockDepFactory +} +``` + And thats it! 
That's how we do testing in the CMS. Our current test suite isn't particularly expansive and thats definitely something we're trying to improve at the moment before we move on to the next team. \ No newline at end of file diff --git a/backend/endpoints/tests/volume_test.go b/backend/endpoints/tests/volume_test.go index 65da3454e..b1e427d17 100644 --- a/backend/endpoints/tests/volume_test.go +++ b/backend/endpoints/tests/volume_test.go @@ -1,112 +1,112 @@ -package tests - -import ( - "io/ioutil" - "net/http" - "os" - "testing" - - "cms.csesoc.unsw.edu.au/database/repositories" - repMocks "cms.csesoc.unsw.edu.au/database/repositories/mocks" - "cms.csesoc.unsw.edu.au/endpoints" - "cms.csesoc.unsw.edu.au/endpoints/models" - "github.com/golang/mock/gomock" - "github.com/google/uuid" - "github.com/stretchr/testify/assert" -) - -func TestUploadDocument(t *testing.T) { -} - -func TestGetPublishedDocument(t *testing.T) { - controller := gomock.NewController(t) - assert := assert.New(t) - defer controller.Finish() - - // ==== test setup ===== - entityID := uuid.New() - - tempFile, _ := ioutil.TempFile(os.TempDir(), "expected") - if _, err := tempFile.WriteString("hello world"); err != nil { - panic(err) - } - tempFile.Seek(0, 0) - defer os.Remove(tempFile.Name()) - - mockDockerFileSystemRepo := repMocks.NewMockIPublishedVolumeRepository(controller) - mockDockerFileSystemRepo.EXPECT().GetFromVolume(entityID.String()).Return(tempFile, nil).Times(1) - - mockDepFactory := createMockDependencyFactory(controller, nil, true) - mockDepFactory.EXPECT().GetPublishedVolumeRepo().Return(mockDockerFileSystemRepo) - - // // ==== test execution ===== - form := models.ValidGetPublishedDocumentRequest{DocumentID: entityID} - response := endpoints.GetPublishedDocument(form, mockDepFactory) - - assert.Equal(response.Status, http.StatusOK) - assert.Equal(response.Response, []byte("{\"Contents\": hello world}")) -} - -func TestUploadImage(t *testing.T) { - controller := gomock.NewController(t) - 
assert := assert.New(t) - defer controller.Finish() - - // ==== test setup ===== - entityID := uuid.New() - parentID := uuid.New() - entityToCreate := repositories.FilesystemEntry{ - LogicalName: "a.png", - ParentFileID: parentID, - IsDocument: false, - OwnerUserId: 1, - } - - mockFileRepo := repMocks.NewMockIFilesystemRepository(controller) - mockFileRepo.EXPECT().CreateEntry(entityToCreate).Return(repositories.FilesystemEntry{ - EntityID: entityID, - LogicalName: "a.png", - IsDocument: false, - ChildrenIDs: []uuid.UUID{}, - ParentFileID: parentID, - }, nil).Times(1) - - tempFile, _ := ioutil.TempFile(os.TempDir(), "expected") - defer os.Remove(tempFile.Name()) - - mockDockerFileSystemRepo := repMocks.NewMockIUnpublishedVolumeRepository(controller) - mockDockerFileSystemRepo.EXPECT().AddToVolume(entityID.String()).Return(nil).Times(1) - mockDockerFileSystemRepo.EXPECT().GetFromVolume(entityID.String()).Return(tempFile, nil).Times(1) - - mockDepFactory := createMockDependencyFactory(controller, mockFileRepo, true) - mockDepFactory.EXPECT().GetUnpublishedVolumeRepo().Return(mockDockerFileSystemRepo) - - // Create request - const pngBytes = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8/5+hHgAHggJ/PchI7wAAAABJRU5ErkJggg==" - garbageFile, _ := ioutil.TempFile(os.TempDir(), "input") - if _, err := garbageFile.WriteString(pngBytes); err != nil { - panic(err) - } - garbageFile.Seek(0, 0) - - defer os.Remove(garbageFile.Name()) - - form := models.ValidImageUploadRequest{ - Parent: parentID, - LogicalName: "a.png", - OwnerGroup: 1, - Image: garbageFile, - } - - // ==== test execution ===== - response := endpoints.UploadImage(form, mockDepFactory) - assert.Equal(response.Status, http.StatusOK) - assert.Equal(response.Response, models.NewEntityResponse{ - NewID: entityID, - }) - - // Assert that the file was written to - content, err := os.ReadFile(tempFile.Name()) - assert.Nil(err) - assert.Equal([]byte(pngBytes), content) -} +package tests + +import ( + 
"io/ioutil" + "net/http" + "os" + "testing" + + "cms.csesoc.unsw.edu.au/database/repositories" + repMocks "cms.csesoc.unsw.edu.au/database/repositories/mocks" + "cms.csesoc.unsw.edu.au/endpoints" + "cms.csesoc.unsw.edu.au/endpoints/models" + "github.com/golang/mock/gomock" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" +) + +func TestUploadDocument(t *testing.T) { +} + +func TestGetPublishedDocument(t *testing.T) { + controller := gomock.NewController(t) + assert := assert.New(t) + defer controller.Finish() + + // ==== test setup ===== + entityID := uuid.New() + + tempFile, _ := ioutil.TempFile(os.TempDir(), "expected") + if _, err := tempFile.WriteString("hello world"); err != nil { + panic(err) + } + tempFile.Seek(0, 0) + defer os.Remove(tempFile.Name()) + + mockDockerFileSystemRepo := repMocks.NewMockIPublishedVolumeRepository(controller) + mockDockerFileSystemRepo.EXPECT().GetFromVolume(entityID.String()).Return(tempFile, nil).Times(1) + + mockDepFactory := createMockDependencyFactory(controller, nil, true) + mockDepFactory.EXPECT().GetPublishedVolumeRepo().Return(mockDockerFileSystemRepo) + + // // ==== test execution ===== + form := models.ValidGetPublishedDocumentRequest{DocumentID: entityID} + response := endpoints.GetPublishedDocument(form, mockDepFactory) + + assert.Equal(response.Status, http.StatusOK) + assert.Equal(response.Response, []byte("{\"Contents\": hello world}")) +} + +func TestUploadImage(t *testing.T) { + controller := gomock.NewController(t) + assert := assert.New(t) + defer controller.Finish() + + // ==== test setup ===== + entityID := uuid.New() + parentID := uuid.New() + entityToCreate := repositories.FilesystemEntry{ + LogicalName: "a.png", + ParentFileID: parentID, + IsDocument: false, + OwnerUserId: 1, + } + + mockFileRepo := repMocks.NewMockIFilesystemRepository(controller) + mockFileRepo.EXPECT().CreateEntry(entityToCreate).Return(repositories.FilesystemEntry{ + EntityID: entityID, + LogicalName: "a.png", + 
IsDocument: false, + ChildrenIDs: []uuid.UUID{}, + ParentFileID: parentID, + }, nil).Times(1) + + tempFile, _ := ioutil.TempFile(os.TempDir(), "expected") + defer os.Remove(tempFile.Name()) + + mockDockerFileSystemRepo := repMocks.NewMockIUnpublishedVolumeRepository(controller) + mockDockerFileSystemRepo.EXPECT().AddToVolume(entityID.String()).Return(nil).Times(1) + mockDockerFileSystemRepo.EXPECT().GetFromVolume(entityID.String()).Return(tempFile, nil).Times(1) + + mockDepFactory := createMockDependencyFactory(controller, mockFileRepo, true) + mockDepFactory.EXPECT().GetUnpublishedVolumeRepo().Return(mockDockerFileSystemRepo) + + // Create request + const pngBytes = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8/5+hHgAHggJ/PchI7wAAAABJRU5ErkJggg==" + garbageFile, _ := ioutil.TempFile(os.TempDir(), "input") + if _, err := garbageFile.WriteString(pngBytes); err != nil { + panic(err) + } + garbageFile.Seek(0, 0) + + defer os.Remove(garbageFile.Name()) + + form := models.ValidImageUploadRequest{ + Parent: parentID, + LogicalName: "a.png", + OwnerGroup: 1, + Image: garbageFile, + } + + // ==== test execution ===== + response := endpoints.UploadImage(form, mockDepFactory) + assert.Equal(response.Status, http.StatusOK) + assert.Equal(response.Response, models.NewEntityResponse{ + NewID: entityID, + }) + + // Assert that the file was written to + content, err := os.ReadFile(tempFile.Name()) + assert.Nil(err) + assert.Equal([]byte(pngBytes), content) +} diff --git a/docker-compose.yml b/docker-compose.yml index eba518cd6..b0b9574bd 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,90 +1,98 @@ -version: "3.8" - -services: - next: - container_name: next - build: - context: ./next - dockerfile: ./Dockerfile.development - volumes: - - './next:/next' - stdin_open: true - ports: - - 3001:3001 - - frontend: - container_name: frontend - build: - context: ./frontend - dockerfile: ./Dockerfile.development - volumes: - - './frontend:/app' - stdin_open: 
true - ports: - - 3000:3000 - - backend: - container_name: go_backend - build: - context: ./backend - dockerfile: ./Dockerfile.development - depends_on: - - migration - volumes: - - './backend:/go/src/cms.csesoc.unsw.edu.au' - - 'unpublished_document_data:/var/lib/documents/unpublished/data' - - 'published_document_data:/var/lib/documents/published/data' - ports: - - 8080:8080 - environment: - - FRONTEND_URI=${FRONTEND_URI} - - POSTGRES_USER=${PG_USER} - - POSTGRES_PASSWORD=${PG_PASSWORD} - - POSTGRES_DB=${PG_DB} - - POSTGRES_PORT=${PG_PORT} - - POSTGRES_HOST=${PG_HOST} - - db: - container_name: pg_container - image: postgres - restart: always - environment: - POSTGRES_USER: ${PG_USER} - POSTGRES_PASSWORD: ${PG_PASSWORD} - POSTGRES_DB: ${PG_DB} - ports: - - ${PG_PORT}:5432 - volumes: - - 'pg_data:/var/lib/postgresql/data' - - migration: - container_name: migration - build: - context: ./postgres - dockerfile: ./Dockerfile - depends_on: - - db - environment: - - POSTGRES_HOST=db - - POSTGRES_DB=${PG_DB} - - POSTGRES_USER=${PG_USER} - - POSTGRES_PASSWORD=${PG_PASSWORD} - - staging_db: - container_name: pg_container_testing - image: postgres - restart: always - user: postgres - environment: - POSTGRES_PASSWORD: test - POSTGRES_DB: cms_testing_db - ports: - - 1234:5432 - volumes: - - './postgres/up:/docker-entrypoint-initdb.d/' - - 'staging_pg_db:/var/lib/postgresql/data' -volumes: - pg_data: - staging_pg_db: - unpublished_document_data: +version: "3.8" + +services: + next: + container_name: next + build: + context: ./next + dockerfile: ./Dockerfile.development + volumes: + - './next:/next' + stdin_open: true + ports: + - 3001:3001 + + frontend: + container_name: frontend + build: + context: ./frontend + dockerfile: ./Dockerfile.development + volumes: + - './frontend:/app' + stdin_open: true + ports: + - 3000:3000 + + backend: + container_name: go_backend + build: + context: ./backend + dockerfile: ./Dockerfile.development + depends_on: + - migration + volumes: + - 
'./backend:/go/src/cms.csesoc.unsw.edu.au' + - 'unpublished_document_data:/var/lib/documents/unpublished/data' + - 'published_document_data:/var/lib/documents/published/data' + ports: + - 8080:8080 + environment: + - FRONTEND_URI=${FRONTEND_URI} + - POSTGRES_USER=${PG_USER} + - POSTGRES_PASSWORD=${PG_PASSWORD} + - POSTGRES_DB=${PG_DB} + - POSTGRES_PORT=${PG_PORT} + - POSTGRES_HOST=${PG_HOST} + + db: + container_name: pg_container + image: postgres + restart: always + environment: + POSTGRES_USER: ${PG_USER} + POSTGRES_PASSWORD: ${PG_PASSWORD} + POSTGRES_DB: ${PG_DB} + ports: + - ${PG_PORT}:5432 + volumes: + - 'pg_data:/var/lib/postgresql/data' + + migration: + container_name: migration + build: + context: ./postgres + dockerfile: ./Dockerfile + depends_on: + - db + environment: + - POSTGRES_HOST=db + - POSTGRES_DB=${PG_DB} + - POSTGRES_USER=${PG_USER} + - POSTGRES_PASSWORD=${PG_PASSWORD} + + sqitch: + container_name: sqitch + build: + context: ./sqitch + dockerfile: ./Dockerfile + depends_on: + - db + + staging_db: + container_name: pg_container_testing + image: postgres + restart: always + user: postgres + environment: + POSTGRES_PASSWORD: test + POSTGRES_DB: cms_testing_db + ports: + - 1234:5432 + volumes: + - './postgres/up:/docker-entrypoint-initdb.d/' + - 'staging_pg_db:/var/lib/postgresql/data' +volumes: + pg_data: + staging_pg_db: + unpublished_document_data: published_document_data: \ No newline at end of file diff --git a/frontend/.dockerignore b/frontend/.dockerignore index 28204dd16..10f2a898e 100644 --- a/frontend/.dockerignore +++ b/frontend/.dockerignore @@ -1,7 +1,7 @@ -**/node_modules -**/npm-debug.log -build -.dockerignore -Dockerfile -Dockerfile.prod -Dockerfile.development +**/node_modules +**/npm-debug.log +build +.dockerignore +Dockerfile +Dockerfile.prod +Dockerfile.development diff --git a/frontend/Dockerfile b/frontend/Dockerfile index ad3734b9d..914b5861a 100644 --- a/frontend/Dockerfile +++ b/frontend/Dockerfile @@ -1,27 +1,27 @@ -# Grab 
the latest Node base image -FROM node:20.2.0-alpine as builder - -# Set the current working directory inside the container -WORKDIR /app - -COPY package.json package-lock.json ./ -RUN npm install - -COPY . . - -RUN npm run build - -# nginx state for serving content -FROM nginx:1.23.1-alpine -COPY ./.nginx/nginx.conf /etc/nginx/nginx.conf -# Set working directory to nginx asset directory -WORKDIR /usr/share/nginx/html -# Remove default nginx static assets -RUN rm -rf ./* -# Copy static assets from builder stage -COPY --from=builder /app/build . - -EXPOSE 80 - -# Containers run nginx with global directives and daemon off -ENTRYPOINT ["nginx", "-g", "daemon off;"] +# Grab the latest Node base image +FROM node:20.2.0-alpine as builder + +# Set the current working directory inside the container +WORKDIR /app + +COPY package.json package-lock.json ./ +RUN npm install + +COPY . . + +RUN npm run build + +# nginx state for serving content +FROM nginx:1.23.1-alpine +COPY ./.nginx/nginx.conf /etc/nginx/nginx.conf +# Set working directory to nginx asset directory +WORKDIR /usr/share/nginx/html +# Remove default nginx static assets +RUN rm -rf ./* +# Copy static assets from builder stage +COPY --from=builder /app/build . 
+ +EXPOSE 80 + +# Containers run nginx with global directives and daemon off +ENTRYPOINT ["nginx", "-g", "daemon off;"] diff --git a/frontend/src/cse-ui-kit/CreateCodeBlock_button /CreateCodeBlock-Styled.tsx b/frontend/src/cse-ui-kit/CreateCodeBlock_button/CreateCodeBlock-Styled.tsx similarity index 100% rename from frontend/src/cse-ui-kit/CreateCodeBlock_button /CreateCodeBlock-Styled.tsx rename to frontend/src/cse-ui-kit/CreateCodeBlock_button/CreateCodeBlock-Styled.tsx diff --git a/frontend/src/cse-ui-kit/CreateCodeBlock_button /CreateCodeBlock.stories.tsx b/frontend/src/cse-ui-kit/CreateCodeBlock_button/CreateCodeBlock.stories.tsx similarity index 100% rename from frontend/src/cse-ui-kit/CreateCodeBlock_button /CreateCodeBlock.stories.tsx rename to frontend/src/cse-ui-kit/CreateCodeBlock_button/CreateCodeBlock.stories.tsx diff --git a/frontend/src/cse-ui-kit/CreateCodeBlock_button /CreateCodeBlock.tsx b/frontend/src/cse-ui-kit/CreateCodeBlock_button/CreateCodeBlock.tsx similarity index 100% rename from frontend/src/cse-ui-kit/CreateCodeBlock_button /CreateCodeBlock.tsx rename to frontend/src/cse-ui-kit/CreateCodeBlock_button/CreateCodeBlock.tsx diff --git a/frontend/src/cse-ui-kit/CreateCodeBlock_button /index.ts b/frontend/src/cse-ui-kit/CreateCodeBlock_button/index.ts similarity index 60% rename from frontend/src/cse-ui-kit/CreateCodeBlock_button /index.ts rename to frontend/src/cse-ui-kit/CreateCodeBlock_button/index.ts index 715546f48..e97f2cff5 100644 --- a/frontend/src/cse-ui-kit/CreateCodeBlock_button /index.ts +++ b/frontend/src/cse-ui-kit/CreateCodeBlock_button/index.ts @@ -1,3 +1,3 @@ import CreateCodeBlock from './CreateCodeBlock'; -export default CreateCodeBlock; \ No newline at end of file +export default CreateCodeBlock; diff --git a/frontend/src/cse-ui-kit/assets/bold-button.svg b/frontend/src/cse-ui-kit/assets/bold-button.svg index f43e94fa4..f4d3570b8 100644 --- a/frontend/src/cse-ui-kit/assets/bold-button.svg +++ 
b/frontend/src/cse-ui-kit/assets/bold-button.svg @@ -1,16 +1,16 @@ - - - - - - - - + + + + + + + + diff --git a/frontend/src/cse-ui-kit/assets/centeralign-button.svg b/frontend/src/cse-ui-kit/assets/centeralign-button.svg index e4f9b2776..e8b6ae269 100644 --- a/frontend/src/cse-ui-kit/assets/centeralign-button.svg +++ b/frontend/src/cse-ui-kit/assets/centeralign-button.svg @@ -1,55 +1,55 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/frontend/src/cse-ui-kit/assets/italics-button.svg b/frontend/src/cse-ui-kit/assets/italics-button.svg index 52d05023d..9fbcf2e81 100644 --- a/frontend/src/cse-ui-kit/assets/italics-button.svg +++ b/frontend/src/cse-ui-kit/assets/italics-button.svg @@ -1,40 +1,40 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/frontend/src/cse-ui-kit/assets/leftrightalign-button.svg b/frontend/src/cse-ui-kit/assets/leftrightalign-button.svg index f87de30d9..c0e5b038b 100644 --- a/frontend/src/cse-ui-kit/assets/leftrightalign-button.svg +++ b/frontend/src/cse-ui-kit/assets/leftrightalign-button.svg @@ -1,55 +1,55 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/frontend/src/cse-ui-kit/assets/underline-button.svg b/frontend/src/cse-ui-kit/assets/underline-button.svg index de03f9764..ed7928f79 100644 --- a/frontend/src/cse-ui-kit/assets/underline-button.svg +++ b/frontend/src/cse-ui-kit/assets/underline-button.svg @@ -1,48 +1,48 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + diff --git a/frontend/src/cse-ui-kit/assets/upload-content.svg b/frontend/src/cse-ui-kit/assets/upload-content.svg index 73a16c64f..365578008 100644 --- a/frontend/src/cse-ui-kit/assets/upload-content.svg +++ b/frontend/src/cse-ui-kit/assets/upload-content.svg @@ -1,50 +1,50 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/sqitch/Dockerfile b/sqitch/Dockerfile new file mode 100644 index 000000000..bb2df352d --- /dev/null +++ b/sqitch/Dockerfile @@ -0,0 +1 @@ +FROM sqitch/sqitch:latest \ No newline at end of file diff --git a/sqitch/deploy/01-create_migration_table.sql b/sqitch/deploy/01-create_migration_table.sql new file mode 100644 index 000000000..29724539a --- /dev/null +++ b/sqitch/deploy/01-create_migration_table.sql @@ -0,0 +1,18 @@ +-- Deploy website:01-create_migration_table to pg + +BEGIN; + +-- XXX Add DDLs here. +CREATE TABLE IF NOT EXISTS migrations ( + MigrationID SERIAL PRIMARY KEY, + VersionID INTEGER default 0 +); + +DO LANGUAGE plpgsql $$ +BEGIN + IF NOT EXISTS (SELECT FROM migrations WHERE MigrationID = 1) THEN + INSERT INTO migrations (MigrationID, VersionID) VALUES (1, 0); + END IF; +END $$; + +COMMIT; diff --git a/sqitch/deploy/02-create_frontend_table.sql b/sqitch/deploy/02-create_frontend_table.sql new file mode 100644 index 000000000..bf8fffdea --- /dev/null +++ b/sqitch/deploy/02-create_frontend_table.sql @@ -0,0 +1,12 @@ +-- Deploy website:02-create_frontend_table to pg +-- requires: 01-create_migration_table + +BEGIN; + +CREATE TABLE frontend ( + FrontendID SERIAL PRIMARY KEY, + FrontendURL VARCHAR(100) +); +-- XXX Add DDLs here. 
+ +COMMIT; diff --git a/sqitch/deploy/03-create_groups_table.sql b/sqitch/deploy/03-create_groups_table.sql new file mode 100644 index 000000000..c399be5db --- /dev/null +++ b/sqitch/deploy/03-create_groups_table.sql @@ -0,0 +1,18 @@ +-- Deploy website:03-create_groups_table to pg +-- requires: 02-create_frontend_table + +BEGIN; + +-- XXX Add DDLs here. +CREATE EXTENSION IF NOT EXISTS hstore; +SET timezone = 'Australia/Sydney'; + +CREATE TYPE permissions_enum as ENUM ('read', 'write', 'delete'); + +CREATE TABLE IF NOT EXISTS groups ( + UID SERIAL PRIMARY KEY, + Name VARCHAR(50) NOT NULL, + Permission permissions_enum UNIQUE NOT NULL +); + +COMMIT; diff --git a/sqitch/deploy/04-create_person_table.sql b/sqitch/deploy/04-create_person_table.sql new file mode 100644 index 000000000..974763606 --- /dev/null +++ b/sqitch/deploy/04-create_person_table.sql @@ -0,0 +1,38 @@ +-- Deploy website:04-create_person_table to pg +-- requires: 03-create_groups_table + +BEGIN; + +-- XXX Add DDLs here. +CREATE TABLE person ( + UID SERIAL PRIMARY KEY, + Email VARCHAR(50) UNIQUE NOT NULL, + First_name VARCHAR(50) NOT NULL, + Password CHAR(64) NOT NULL, + + isOfGroup INT, + frontendid INT, + + CONSTRAINT fk_AccessLevel FOREIGN KEY (isOfGroup) + REFERENCES groups(UID), + + CONSTRAINT fk_AccessFrontend FOREIGN KEY (frontendid) + REFERENCES frontend(FrontendID), + + /* non duplicate email and password constraints */ + CONSTRAINT no_dupes UNIQUE (Email, Password) +); + +/* create user function plpgsql */ +CREATE OR REPLACE FUNCTION create_normal_user (email VARCHAR, name VARCHAR, password VARCHAR, frontendID INT) RETURNS void +LANGUAGE plpgsql +AS $$ +DECLARE +BEGIN + INSERT INTO person (Email, First_name, Password, isOfGroup, frontendID) + VALUES (email, name, encode(sha256(password::BYTEA), 'hex'), 2, 1); +END $$; + + + +COMMIT; diff --git a/sqitch/deploy/05-create_filesystem_table.sql b/sqitch/deploy/05-create_filesystem_table.sql new file mode 100644 index 000000000..96b7eff11 --- 
/dev/null +++ b/sqitch/deploy/05-create_filesystem_table.sql @@ -0,0 +1,93 @@ +-- Deploy website:05-create_filesystem_table to pg +-- requires: 04-create_person_table + +BEGIN; + +-- XXX Add DDLs here. +SET timezone = 'Australia/Sydney'; +CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; + +/* MetaData */ +CREATE TABLE metadata ( + MetadataID uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + CreatedAt TIMESTAMP NOT NULL DEFAULT NOW() +); + +/** + The filesystem table models all file heirachies in our system +**/ +CREATE TABLE filesystem ( + EntityID uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + LogicalName VARCHAR(50) NOT NULL, + + IsDocument BOOLEAN DEFAULT false, + IsPublished BOOLEAN DEFAULT false, + CreatedAt TIMESTAMP NOT NULL DEFAULT NOW(), + + /* MetaData */ + -- MetadataID uuid NOT NULL, + + OwnedBy INT, + /* Pain */ + Parent uuid REFERENCES filesystem(EntityID) DEFAULT NULL, + + /* FK Constraint */ + CONSTRAINT fk_owner FOREIGN KEY (OwnedBy) + REFERENCES groups(UID), + + -- CONSTRAINT fk_meta FOREIGN KEY (MetadataID) REFERENCES metadata(MetadataID), + + /* Unique name constraint: there should not exist an entity of the same type with the + same parent and logical name. 
*/ + CONSTRAINT unique_name UNIQUE (Parent, LogicalName, IsDocument) +); + +/* Utility procedure :) */ +CREATE OR REPLACE FUNCTION new_entity (parentP uuid, logicalNameP VARCHAR, ownedByP INT, isDocumentP BOOLEAN DEFAULT false) RETURNS uuid +LANGUAGE plpgsql +AS $$ +DECLARE + newEntityID filesystem.EntityID%type; + parentIsDocument BOOLEAN := (SELECT IsDocument FROM filesystem WHERE EntityID = parentP LIMIT 1); +BEGIN + IF parentIsDocument THEN + /* We shouldnt be delcaring that a document is our parent */ + RAISE EXCEPTION SQLSTATE '90001' USING MESSAGE = 'cannot make parent a document'; + END If; + WITH newEntity AS ( + INSERT INTO filesystem (LogicalName, IsDocument, OwnedBy, Parent) + VALUES (logicalNameP, isDocumentP, ownedByP, parentP) + RETURNING EntityID + ) + + SELECT newEntity.EntityID INTO newEntityID FROM newEntity; + RETURN newEntityID; +END $$; + +/* Another utility procedure */ +CREATE OR REPLACE FUNCTION delete_entity (entityIDP uuid) RETURNS void +LANGUAGE plpgsql +AS $$ +DECLARE + numKids INT := (SELECT COUNT(EntityID) FROM filesystem WHERE Parent = entityIDP); + isRoot BOOLEAN := ((SELECT Parent FROM filesystem WHERE EntityID = entityIDP) IS NULL); +BEGIN + /* If this is a directory and has kids raise an error */ + IF numKids > 0 + THEN + /* entity has children (please dont orphan them O_O ) */ + RAISE EXCEPTION SQLSTATE '90001' USING MESSAGE = 'entity has children (please dont orphan them O_O )'; + END IF; + + IF isRoot THEN + /* stop trying to delete root >:( */ + RAISE EXCEPTION SQLSTATE '90001' USING MESSAGE = 'stop trying to delete root >:('; + END IF; + + DELETE FROM filesystem WHERE EntityID = entityIDP; +END $$; + + + + +COMMIT; diff --git a/sqitch/deploy/06-create_dummy_data.sql b/sqitch/deploy/06-create_dummy_data.sql new file mode 100644 index 000000000..0d1fb12d5 --- /dev/null +++ b/sqitch/deploy/06-create_dummy_data.sql @@ -0,0 +1,68 @@ +-- Deploy website:06-create_dummy_data to pg +-- requires: 05-create_filesystem_table + +BEGIN; + 
+-- XXX Add DDLs here. +SET timezone = 'Australia/Sydney'; + +/* Create default groups */ +INSERT INTO groups (Name, Permission) VALUES ('admin', 'delete'); +INSERT INTO groups (name, Permission) VALUES ('user', 'write'); + +/* Setup FS table and modify constraints */ +/* Insert root directory and then add our constraints */ +DO $$ +DECLARE + randomGroup groups.UID%type; + rootID filesystem.EntityID%type; +BEGIN + SELECT groups.UID INTO randomGroup FROM groups WHERE Name = 'admin'::VARCHAR; + /* Insert the root directory */ + INSERT INTO filesystem (EntityID, LogicalName, OwnedBy) + VALUES (uuid_nil(), 'root', randomGroup); + SELECT filesystem.EntityID INTO rootID FROM filesystem WHERE LogicalName = 'root'::VARCHAR; + /* Set parent to uuid_nil() because postgres driver has issue supporting NULL values */ + UPDATE filesystem SET Parent = uuid_nil() WHERE EntityID = rootID; + + /* insert "has parent" constraint*/ + EXECUTE 'ALTER TABLE filesystem + ADD CONSTRAINT has_parent CHECK (Parent != NULL)'; +END $$; + + + +/* create a dummy frontend */ +INSERT INTO frontend (FrontendURL) VALUES ('http://localhost:8080'::VARCHAR); + +/* Insert dummy data */ +DO $$ +DECLARE + rootID filesystem.EntityID%type; + newEntity filesystem.EntityID%type; + wasPopping filesystem.EntityID%type; + oldEntity filesystem.EntityID%type; +BEGIN + SELECT filesystem.EntityID INTO rootID FROM filesystem WHERE EntityID = uuid_nil(); + + newEntity := (SELECT new_entity(rootID, 'downloads'::VARCHAR, 1, false)); + oldEntity := (SELECT new_entity(rootID, 'documents'::VARCHAR, 1, false)); + + wasPopping := (SELECT new_entity(oldEntity, 'cool_document'::VARCHAR, 1, true)); + wasPopping := (SELECT new_entity(oldEntity, 'cool_document_round_2'::VARCHAR, 1, true)); + PERFORM delete_entity(wasPopping); + wasPopping := (SELECT new_entity(oldEntity, 'cool_document_round_2'::VARCHAR, 1, true)); +END $$; + + +/* inserting two accounts into db */ +DO LANGUAGE plpgsql $$ +BEGIN + EXECUTE 
create_normal_user('z0000000@ad.unsw.edu.au', 'adam', 'password', 1); + EXECUTE create_normal_user('john.smith@gmail.com', 'john', 'password', 1); + EXECUTE create_normal_user('jane.doe@gmail.com', 'jane', 'password', 1); +END $$; + + + +COMMIT; diff --git a/sqitch/revert/01-create_migration_table.sql b/sqitch/revert/01-create_migration_table.sql new file mode 100644 index 000000000..edad4975d --- /dev/null +++ b/sqitch/revert/01-create_migration_table.sql @@ -0,0 +1,8 @@ +-- Revert website:01-create_migration_table from pg + +BEGIN; + +-- XXX Add DDLs here. +DROP TABLE IF EXISTS migrations CASCADE; + +COMMIT; diff --git a/sqitch/revert/02-create_frontend_table.sql b/sqitch/revert/02-create_frontend_table.sql new file mode 100644 index 000000000..b358e3417 --- /dev/null +++ b/sqitch/revert/02-create_frontend_table.sql @@ -0,0 +1,8 @@ +-- Revert website:02-create_frontend_table from pg + +BEGIN; + +-- XXX Add DDLs here. +DROP TABLE IF EXISTS frontend; + +COMMIT; diff --git a/sqitch/revert/03-create_groups_table.sql b/sqitch/revert/03-create_groups_table.sql new file mode 100644 index 000000000..cc3857bea --- /dev/null +++ b/sqitch/revert/03-create_groups_table.sql @@ -0,0 +1,9 @@ +-- Revert website:03-create_groups_table from pg + +BEGIN; + +-- XXX Add DDLs here. +DROP TYPE IF EXISTS permissions_enum CASCADE; + +DROP TABLE IF EXISTS groups CASCADE; +COMMIT; diff --git a/sqitch/revert/04-create_person_table.sql b/sqitch/revert/04-create_person_table.sql new file mode 100644 index 000000000..76a9c9431 --- /dev/null +++ b/sqitch/revert/04-create_person_table.sql @@ -0,0 +1,11 @@ +-- Revert website:04-create_person_table from pg + +BEGIN; + +-- XXX Add DDLs here. 
+DROP TABLE IF EXISTS person; + +DROP FUNCTION IF EXISTS create_normal_user; + + +COMMIT; diff --git a/sqitch/revert/05-create_filesystem_table.sql b/sqitch/revert/05-create_filesystem_table.sql new file mode 100644 index 000000000..c9e29b8f3 --- /dev/null +++ b/sqitch/revert/05-create_filesystem_table.sql @@ -0,0 +1,11 @@ +-- Revert website:05-create_filesystem_table from pg + +BEGIN; + +-- XXX Add DDLs here. +DROP TABLE IF EXISTS metadata; +DROP TABLE IF EXISTS filesystem; +DROP FUNCTION IF EXISTS new_entity; +DROP FUNCTION IF EXISTS delete_entity; + +COMMIT; diff --git a/sqitch/revert/06-create_dummy_data.sql b/sqitch/revert/06-create_dummy_data.sql new file mode 100644 index 000000000..1c6baab4b --- /dev/null +++ b/sqitch/revert/06-create_dummy_data.sql @@ -0,0 +1,7 @@ +-- Revert website:06-create_dummy_data from pg + +BEGIN; + +-- XXX Add DDLs here. + +COMMIT; diff --git a/sqitch/sqitch.bat b/sqitch/sqitch.bat new file mode 100644 index 000000000..86fb0fde7 --- /dev/null +++ b/sqitch/sqitch.bat @@ -0,0 +1,51 @@ +@echo off & setlocal enableextensions enabledelayedexpansion +REM # Determine which Docker image to run. +IF NOT DEFINED SQITCH_IMAGE ( + set SQITCH_IMAGE=website-sqitch:latest +) +REM set SQITCH_IMAGE=website-sqitch:latest + +REM # Set up required pass-through variables. 
+FOR /F "tokens=*" %%g IN ('whoami') do (SET user=%%g) +set passopt= -e SQITCH_ORIG_SYSUSER="%username%" +FOR /F "tokens=*" %%g IN ('hostname') do (SET machinehostname=%%g) +set passopt=%passopt% -e SQITCH_ORIG_EMAIL="%username%@%machinehostname%" +FOR /F "tokens=*" %%g IN ('tzutil /g') do (SET TZ=%%g) +set passopt=%passopt% -e TZ="%TZ%" +if NOT DEFINED LESS ( + set LESS=-R +) +set passopt=%passopt% -e LESS=%LESS% + +for %%i in ( + SQITCH_CONFIG SQITCH_USERNAME SQITCH_PASSWORD SQITCH_FULLNAME SQITCH_EMAIL SQITCH_TARGET + DBI_TRACE + PGUSER PGPASSWORD PGHOST PGHOSTADDR PGPORT PGDATABASE PGSERVICE PGOPTIONS PGSSLMODE PGREQUIRESSL PGSSLCOMPRESSION PGREQUIREPEER PGKRBSRVNAME PGKRBSRVNAME PGGSSLIB PGCONNECT_TIMEOUT PGCLIENTENCODING PGTARGETSESSIONATTRS + MYSQL_PWD MYSQL_HOST MYSQL_TCP_PORT + TNS_ADMIN TWO_TASK ORACLE_SID + ISC_USER ISC_PASSWORD + VSQL_HOST VSQL_PORT VSQL_USER VSQL_PASSWORD VSQL_SSLMODE + SNOWSQL_ACCOUNT SNOWSQL_USER SNOWSQL_PWD SNOWSQL_HOST SNOWSQL_PORT SNOWSQL_DATABASE SNOWSQL_REGION SNOWSQL_WAREHOUSE SNOWSQL_PRIVATE_KEY_PASSPHRASE +) do if defined %%i ( + echo %%i is defined as !%%i! + SET passopt=!passopt! -e !%%i! +) + +REM # Determine the name of the container home directory. +set homedst=/home +REM if [ $(id -u ${user}) -eq 0 ]; then +REM homedst=/root +REM fi +REM # Set HOME, since the user ID likely won't be the same as for the sqitch user. +set passopt=%passopt% -e HOME="%homedst%" + +echo %passopt% + +REM # Run the container with the current and home directories mounted. +@echo on +docker run -it --rm --network host ^ + --mount "type=bind,src=%cd%,dst=/repo" ^ + --mount "type=bind,src=%UserProfile%,dst=%homedst%" ^ + %passopt% %SQITCH_IMAGE% %* + +@endlocal diff --git a/sqitch/sqitch.conf b/sqitch/sqitch.conf new file mode 100644 index 000000000..4d121dd98 --- /dev/null +++ b/sqitch/sqitch.conf @@ -0,0 +1,10 @@ +[core] + engine = pg + # plan_file = sqitch.plan + # top_dir = . 
+# [engine "pg"] + # target = db:pg: + # registry = sqitch + # client = psql +[target "test"] + uri = db:pg://postgres:postgres@localhost:5432/test_db diff --git a/sqitch/sqitch.plan b/sqitch/sqitch.plan new file mode 100644 index 000000000..e233ce8e3 --- /dev/null +++ b/sqitch/sqitch.plan @@ -0,0 +1,9 @@ +%syntax-version=1.0.0 +%project=website + +01-create_migration_table 2023-09-14T13:46:21Z John Doe # Add migration table +02-create_frontend_table [01-create_migration_table] 2023-09-14T13:47:39Z John Doe # Add frontend table +03-create_groups_table [02-create_frontend_table] 2023-09-14T13:50:37Z John Doe # Groups table +04-create_person_table [03-create_groups_table] 2023-09-14T13:52:42Z John Doe # person table +05-create_filesystem_table [04-create_person_table] 2023-09-14T13:54:21Z John Doe # filesystem +06-create_dummy_data [05-create_filesystem_table] 2023-09-14T13:56:23Z John Doe # dummy data diff --git a/sqitch/sqitch.sh b/sqitch/sqitch.sh new file mode 100644 index 000000000..5ab689134 --- /dev/null +++ b/sqitch/sqitch.sh @@ -0,0 +1,68 @@ +#!/usr/bin/env bash + +# Determine which Docker image to run. +SQITCH_IMAGE=${SQITCH_IMAGE:=website-sqitch:latest} + +# Set up required pass-through variables. +user=${USER-$(whoami)} +passopt=( + -e "SQITCH_ORIG_SYSUSER=$user" + -e "SQITCH_ORIG_EMAIL=$user@$(hostname)" + -e "TZ=$(date +%Z)" \ + -e "LESS=${LESS:--R}" \ +) + +# Handle OS-specific options. +case "$(uname -s)" in + Linux*) + passopt+=(-e "SQITCH_ORIG_FULLNAME=$(getent passwd $user | cut -d: -f5 | cut -d, -f1)") + passopt+=(-u $(id -u ${user}):$(id -g ${user})) + ;; + Darwin*) + passopt+=(-e "SQITCH_ORIG_FULLNAME=$(/usr/bin/id -P $user | awk -F '[:]' '{print $8}')") + ;; + MINGW*|CYGWIN*) + passopt+=(-e "SQITCH_ORIG_FULLNAME=$(net user $user)") + ;; + *) + echo "Unknown OS: $(uname -s)" + exit 2 + ;; +esac + +# Iterate over optional Sqitch and engine variables. 
+for var in \ + SQITCH_CONFIG SQITCH_USERNAME SQITCH_PASSWORD SQITCH_FULLNAME SQITCH_EMAIL SQITCH_TARGET \ + DBI_TRACE \ + PGUSER PGPASSWORD PGHOST PGHOSTADDR PGPORT PGDATABASE PGSERVICE PGOPTIONS PGSSLMODE PGREQUIRESSL PGSSLCOMPRESSION PGREQUIREPEER PGKRBSRVNAME PGKRBSRVNAME PGGSSLIB PGCONNECT_TIMEOUT PGCLIENTENCODING PGTARGETSESSIONATTRS \ + MYSQL_PWD MYSQL_HOST MYSQL_TCP_PORT \ + TNS_ADMIN TWO_TASK ORACLE_SID \ + ISC_USER ISC_PASSWORD \ + VSQL_HOST VSQL_PORT VSQL_USER VSQL_PASSWORD VSQL_SSLMODE \ + SNOWSQL_ACCOUNT SNOWSQL_USER SNOWSQL_PWD SNOWSQL_HOST SNOWSQL_PORT SNOWSQL_DATABASE SNOWSQL_REGION SNOWSQL_WAREHOUSE SNOWSQL_PRIVATE_KEY_PASSPHRASE SNOWSQL_ROLE +do + if [ -n "${!var}" ]; then + passopt+=(-e $var) + fi +done + +# Determine the name of the container home directory. +homedst=/home +if [ $(id -u ${user}) -eq 0 ]; then + homedst=/root +fi +# Set HOME, since the user ID likely won't be the same as for the sqitch user. +passopt+=(-e "HOME=${homedst}") + +# Determine necessary flags when stdin and/or stdout are opened on a TTY. +if [ -t 0 ] && [ -t 1 ]; then + passopt+=(--interactive --tty) +elif [ ! -t 0 ]; then + passopt+=(--interactive) +fi + +# Run the container with the current and home directories mounted. +docker run --rm --network host \ + --mount "type=bind,src=$(pwd),dst=/repo" \ + --mount "type=bind,src=$HOME,dst=$homedst" \ + "${passopt[@]}" "$SQITCH_IMAGE" "$@" \ No newline at end of file diff --git a/sqitch/verify/01-create_migration_table.sql b/sqitch/verify/01-create_migration_table.sql new file mode 100644 index 000000000..9d94d7bba --- /dev/null +++ b/sqitch/verify/01-create_migration_table.sql @@ -0,0 +1,7 @@ +-- Verify website:01-create_migration_table on pg + +BEGIN; + +-- XXX Add verifications here. 
+ +ROLLBACK; diff --git a/sqitch/verify/02-create_frontend_table.sql b/sqitch/verify/02-create_frontend_table.sql new file mode 100644 index 000000000..71177e06d --- /dev/null +++ b/sqitch/verify/02-create_frontend_table.sql @@ -0,0 +1,7 @@ +-- Verify website:02-create_frontend_table on pg + +BEGIN; + +-- XXX Add verifications here. + +ROLLBACK; diff --git a/sqitch/verify/03-create_groups_table.sql b/sqitch/verify/03-create_groups_table.sql new file mode 100644 index 000000000..e2a6d2fbb --- /dev/null +++ b/sqitch/verify/03-create_groups_table.sql @@ -0,0 +1,7 @@ +-- Verify website:03-create_groups_table on pg + +BEGIN; + +-- XXX Add verifications here. + +ROLLBACK; diff --git a/sqitch/verify/04-create_person_table.sql b/sqitch/verify/04-create_person_table.sql new file mode 100644 index 000000000..5533ed625 --- /dev/null +++ b/sqitch/verify/04-create_person_table.sql @@ -0,0 +1,7 @@ +-- Verify website:04-create_person_table on pg + +BEGIN; + +-- XXX Add verifications here. + +ROLLBACK; diff --git a/sqitch/verify/05-create_filesystem_table.sql b/sqitch/verify/05-create_filesystem_table.sql new file mode 100644 index 000000000..ab0636717 --- /dev/null +++ b/sqitch/verify/05-create_filesystem_table.sql @@ -0,0 +1,7 @@ +-- Verify website:05-create_filesystem_table on pg + +BEGIN; + +-- XXX Add verifications here. + +ROLLBACK; diff --git a/sqitch/verify/06-create_dummy_data.sql b/sqitch/verify/06-create_dummy_data.sql new file mode 100644 index 000000000..cad81be19 --- /dev/null +++ b/sqitch/verify/06-create_dummy_data.sql @@ -0,0 +1,7 @@ +-- Verify website:06-create_dummy_data on pg + +BEGIN; + +-- XXX Add verifications here. 
+ +ROLLBACK; diff --git a/utilities/createUsers.sh b/utilities/createUsers.sh old mode 100755 new mode 100644 diff --git a/utilities/ghost-exporter/.paket/Paket.Restore.targets b/utilities/ghost-exporter/.paket/Paket.Restore.targets index e230bb215..4deb15bcd 100644 --- a/utilities/ghost-exporter/.paket/Paket.Restore.targets +++ b/utilities/ghost-exporter/.paket/Paket.Restore.targets @@ -1,557 +1,557 @@ - - - - - - - $(MSBuildAllProjects);$(MSBuildThisFileFullPath) - - $(MSBuildVersion) - 15.0.0 - false - true - - true - $(MSBuildThisFileDirectory) - $(MSBuildThisFileDirectory)..\ - $(PaketRootPath)paket-files\paket.restore.cached - $(PaketRootPath)paket.lock - classic - proj - assembly - native - /Library/Frameworks/Mono.framework/Commands/mono - mono - - - $(PaketRootPath)paket.bootstrapper.exe - $(PaketToolsPath)paket.bootstrapper.exe - $([System.IO.Path]::GetDirectoryName("$(PaketBootStrapperExePath)"))\ - - "$(PaketBootStrapperExePath)" - $(MonoPath) --runtime=v4.0.30319 "$(PaketBootStrapperExePath)" - - - - - true - true - - - True - - - False - - $(BaseIntermediateOutputPath.TrimEnd('\').TrimEnd('\/')) - - - - - - - - - $(PaketRootPath)paket - $(PaketToolsPath)paket - - - - - - $(PaketRootPath)paket.exe - $(PaketToolsPath)paket.exe - - - - - - <_DotnetToolsJson Condition="Exists('$(PaketRootPath)/.config/dotnet-tools.json')">$([System.IO.File]::ReadAllText("$(PaketRootPath)/.config/dotnet-tools.json")) - <_ConfigContainsPaket Condition=" '$(_DotnetToolsJson)' != ''">$(_DotnetToolsJson.Contains('"paket"')) - <_ConfigContainsPaket Condition=" '$(_ConfigContainsPaket)' == ''">false - - - - - - - - - - - <_PaketCommand>dotnet paket - - - - - - $(PaketToolsPath)paket - $(PaketBootStrapperExeDir)paket - - - paket - - - - - <_PaketExeExtension>$([System.IO.Path]::GetExtension("$(PaketExePath)")) - <_PaketCommand Condition=" '$(_PaketCommand)' == '' AND '$(_PaketExeExtension)' == '.dll' ">dotnet "$(PaketExePath)" - <_PaketCommand Condition=" '$(_PaketCommand)' == '' 
AND '$(OS)' != 'Windows_NT' AND '$(_PaketExeExtension)' == '.exe' ">$(MonoPath) --runtime=v4.0.30319 "$(PaketExePath)" - <_PaketCommand Condition=" '$(_PaketCommand)' == '' ">"$(PaketExePath)" - - - - - - - - - - - - - - - - - - - - - true - $(NoWarn);NU1603;NU1604;NU1605;NU1608 - false - true - - - - - - - - - $([System.IO.File]::ReadAllText('$(PaketRestoreCacheFile)')) - - - - - - - $([System.Text.RegularExpressions.Regex]::Split(`%(Identity)`, `": "`)[0].Replace(`"`, ``).Replace(` `, ``)) - $([System.Text.RegularExpressions.Regex]::Split(`%(Identity)`, `": "`)[1].Replace(`"`, ``).Replace(` `, ``)) - - - - - %(PaketRestoreCachedKeyValue.Value) - %(PaketRestoreCachedKeyValue.Value) - - - - - true - false - true - - - - - true - - - - - - - - - - - - - - - - - - - $(PaketIntermediateOutputPath)\$(MSBuildProjectFile).paket.references.cached - - $(MSBuildProjectFullPath).paket.references - - $(MSBuildProjectDirectory)\$(MSBuildProjectName).paket.references - - $(MSBuildProjectDirectory)\paket.references - - false - true - true - references-file-or-cache-not-found - - - - - $([System.IO.File]::ReadAllText('$(PaketReferencesCachedFilePath)')) - $([System.IO.File]::ReadAllText('$(PaketOriginalReferencesFilePath)')) - references-file - false - - - - - false - - - - - true - target-framework '$(TargetFramework)' or '$(TargetFrameworks)' files @(PaketResolvedFilePaths) - - - - - - - - - - - false - true - - - - - - - - - - - $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',').Length) - $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[0]) - $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[1]) - $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[4]) - $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[5]) - $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[6]) - 
$([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[7]) - - - %(PaketReferencesFileLinesInfo.PackageVersion) - All - runtime - $(ExcludeAssets);contentFiles - $(ExcludeAssets);build;buildMultitargeting;buildTransitive - true - true - - - - - $(PaketIntermediateOutputPath)/$(MSBuildProjectFile).paket.clitools - - - - - - - - - $([System.String]::Copy('%(PaketCliToolFileLines.Identity)').Split(',')[0]) - $([System.String]::Copy('%(PaketCliToolFileLines.Identity)').Split(',')[1]) - - - %(PaketCliToolFileLinesInfo.PackageVersion) - - - - - - - - - - false - - - - - - <_NuspecFilesNewLocation Include="$(PaketIntermediateOutputPath)\$(Configuration)\*.nuspec"/> - - - - - - $(MSBuildProjectDirectory)/$(MSBuildProjectFile) - true - false - true - false - true - false - true - false - true - false - true - $(PaketIntermediateOutputPath)\$(Configuration) - $(PaketIntermediateOutputPath) - - - - <_NuspecFiles Include="$(AdjustedNuspecOutputPath)\*.$(PackageVersion.Split(`+`)[0]).nuspec"/> - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + $(MSBuildAllProjects);$(MSBuildThisFileFullPath) + + $(MSBuildVersion) + 15.0.0 + false + true + + true + $(MSBuildThisFileDirectory) + $(MSBuildThisFileDirectory)..\ + $(PaketRootPath)paket-files\paket.restore.cached + $(PaketRootPath)paket.lock + classic + proj + assembly + native + /Library/Frameworks/Mono.framework/Commands/mono + mono + + + $(PaketRootPath)paket.bootstrapper.exe + $(PaketToolsPath)paket.bootstrapper.exe + $([System.IO.Path]::GetDirectoryName("$(PaketBootStrapperExePath)"))\ + + "$(PaketBootStrapperExePath)" + $(MonoPath) --runtime=v4.0.30319 "$(PaketBootStrapperExePath)" + + + + + true + true + + + True + + + False + + $(BaseIntermediateOutputPath.TrimEnd('\').TrimEnd('\/')) + + + + + + + + + $(PaketRootPath)paket + $(PaketToolsPath)paket + + + + + + $(PaketRootPath)paket.exe + $(PaketToolsPath)paket.exe + + + + + + <_DotnetToolsJson 
Condition="Exists('$(PaketRootPath)/.config/dotnet-tools.json')">$([System.IO.File]::ReadAllText("$(PaketRootPath)/.config/dotnet-tools.json")) + <_ConfigContainsPaket Condition=" '$(_DotnetToolsJson)' != ''">$(_DotnetToolsJson.Contains('"paket"')) + <_ConfigContainsPaket Condition=" '$(_ConfigContainsPaket)' == ''">false + + + + + + + + + + + <_PaketCommand>dotnet paket + + + + + + $(PaketToolsPath)paket + $(PaketBootStrapperExeDir)paket + + + paket + + + + + <_PaketExeExtension>$([System.IO.Path]::GetExtension("$(PaketExePath)")) + <_PaketCommand Condition=" '$(_PaketCommand)' == '' AND '$(_PaketExeExtension)' == '.dll' ">dotnet "$(PaketExePath)" + <_PaketCommand Condition=" '$(_PaketCommand)' == '' AND '$(OS)' != 'Windows_NT' AND '$(_PaketExeExtension)' == '.exe' ">$(MonoPath) --runtime=v4.0.30319 "$(PaketExePath)" + <_PaketCommand Condition=" '$(_PaketCommand)' == '' ">"$(PaketExePath)" + + + + + + + + + + + + + + + + + + + + + true + $(NoWarn);NU1603;NU1604;NU1605;NU1608 + false + true + + + + + + + + + $([System.IO.File]::ReadAllText('$(PaketRestoreCacheFile)')) + + + + + + + $([System.Text.RegularExpressions.Regex]::Split(`%(Identity)`, `": "`)[0].Replace(`"`, ``).Replace(` `, ``)) + $([System.Text.RegularExpressions.Regex]::Split(`%(Identity)`, `": "`)[1].Replace(`"`, ``).Replace(` `, ``)) + + + + + %(PaketRestoreCachedKeyValue.Value) + %(PaketRestoreCachedKeyValue.Value) + + + + + true + false + true + + + + + true + + + + + + + + + + + + + + + + + + + $(PaketIntermediateOutputPath)\$(MSBuildProjectFile).paket.references.cached + + $(MSBuildProjectFullPath).paket.references + + $(MSBuildProjectDirectory)\$(MSBuildProjectName).paket.references + + $(MSBuildProjectDirectory)\paket.references + + false + true + true + references-file-or-cache-not-found + + + + + $([System.IO.File]::ReadAllText('$(PaketReferencesCachedFilePath)')) + $([System.IO.File]::ReadAllText('$(PaketOriginalReferencesFilePath)')) + references-file + false + + + + + false + + + + + true + 
target-framework '$(TargetFramework)' or '$(TargetFrameworks)' files @(PaketResolvedFilePaths) + + + + + + + + + + + false + true + + + + + + + + + + + $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',').Length) + $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[0]) + $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[1]) + $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[4]) + $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[5]) + $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[6]) + $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[7]) + + + %(PaketReferencesFileLinesInfo.PackageVersion) + All + runtime + $(ExcludeAssets);contentFiles + $(ExcludeAssets);build;buildMultitargeting;buildTransitive + true + true + + + + + $(PaketIntermediateOutputPath)/$(MSBuildProjectFile).paket.clitools + + + + + + + + + $([System.String]::Copy('%(PaketCliToolFileLines.Identity)').Split(',')[0]) + $([System.String]::Copy('%(PaketCliToolFileLines.Identity)').Split(',')[1]) + + + %(PaketCliToolFileLinesInfo.PackageVersion) + + + + + + + + + + false + + + + + + <_NuspecFilesNewLocation Include="$(PaketIntermediateOutputPath)\$(Configuration)\*.nuspec"/> + + + + + + $(MSBuildProjectDirectory)/$(MSBuildProjectFile) + true + false + true + false + true + false + true + false + true + false + true + $(PaketIntermediateOutputPath)\$(Configuration) + $(PaketIntermediateOutputPath) + + + + <_NuspecFiles Include="$(AdjustedNuspecOutputPath)\*.$(PackageVersion.Split(`+`)[0]).nuspec"/> + + + + + + + + + + + + + + + + + + + + + + + diff --git a/utilities/ghost-exporter/Main.fs b/utilities/ghost-exporter/Main.fs index 33d9e0cd7..e9a4da00f 100644 --- a/utilities/ghost-exporter/Main.fs +++ b/utilities/ghost-exporter/Main.fs @@ -1,25 +1,25 @@ -module Main - -open GhostSyntax -open CMSSyntax -open Newtonsoft.Json - -open 
FSharpPlus -open Fleece.Newtonsoft -open System.IO -open Converters - -[] -let main args = - let ghostDocument = ofJsonText (File.ReadAllText args[0]) - - match Option.ofResult ghostDocument with - | Some document -> - // Write the result to disk - let compiledResult = toJsonText (GhostToCms document) - let fileName = Path.GetFileNameWithoutExtension (args[0]) - File.WriteAllText ($"{Path.GetFileNameWithoutExtension (args[0])}__exported.json", compiledResult) - - | _ -> printfn "%s" "failed to parse ghost document" - +module Main + +open GhostSyntax +open CMSSyntax +open Newtonsoft.Json + +open FSharpPlus +open Fleece.Newtonsoft +open System.IO +open Converters + +[] +let main args = + let ghostDocument = ofJsonText (File.ReadAllText args[0]) + + match Option.ofResult ghostDocument with + | Some document -> + // Write the result to disk + let compiledResult = toJsonText (GhostToCms document) + let fileName = Path.GetFileNameWithoutExtension (args[0]) + File.WriteAllText ($"{Path.GetFileNameWithoutExtension (args[0])}__exported.json", compiledResult) + + | _ -> printfn "%s" "failed to parse ghost document" + 0 \ No newline at end of file From b8cda7f62ed62e81c9492cd00b80fc3ee33f856e Mon Sep 17 00:00:00 2001 From: Westley Date: Sat, 23 Sep 2023 17:53:24 +1000 Subject: [PATCH 2/4] Fixed the SQL scripts --- sqitch/deploy/02-create_frontend_table.sql | 2 +- sqitch/deploy/03-create_groups_table.sql | 1 + sqitch/deploy/04-create_person_table.sql | 2 +- sqitch/deploy/05-create_filesystem_table.sql | 4 ++-- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/sqitch/deploy/02-create_frontend_table.sql b/sqitch/deploy/02-create_frontend_table.sql index bf8fffdea..6b01f39a4 100644 --- a/sqitch/deploy/02-create_frontend_table.sql +++ b/sqitch/deploy/02-create_frontend_table.sql @@ -3,7 +3,7 @@ BEGIN; -CREATE TABLE frontend ( +CREATE TABLE IF NOT EXISTS frontend ( FrontendID SERIAL PRIMARY KEY, FrontendURL VARCHAR(100) ); diff --git 
a/sqitch/deploy/03-create_groups_table.sql b/sqitch/deploy/03-create_groups_table.sql index c399be5db..6c76d943e 100644 --- a/sqitch/deploy/03-create_groups_table.sql +++ b/sqitch/deploy/03-create_groups_table.sql @@ -7,6 +7,7 @@ BEGIN; CREATE EXTENSION IF NOT EXISTS hstore; SET timezone = 'Australia/Sydney'; +DROP TYPE IF EXISTS permissions_enum CASCADE; CREATE TYPE permissions_enum as ENUM ('read', 'write', 'delete'); CREATE TABLE IF NOT EXISTS groups ( diff --git a/sqitch/deploy/04-create_person_table.sql b/sqitch/deploy/04-create_person_table.sql index 974763606..b553a4159 100644 --- a/sqitch/deploy/04-create_person_table.sql +++ b/sqitch/deploy/04-create_person_table.sql @@ -4,7 +4,7 @@ BEGIN; -- XXX Add DDLs here. -CREATE TABLE person ( +CREATE TABLE IF NOT EXISTS person ( UID SERIAL PRIMARY KEY, Email VARCHAR(50) UNIQUE NOT NULL, First_name VARCHAR(50) NOT NULL, diff --git a/sqitch/deploy/05-create_filesystem_table.sql b/sqitch/deploy/05-create_filesystem_table.sql index 96b7eff11..65422e01b 100644 --- a/sqitch/deploy/05-create_filesystem_table.sql +++ b/sqitch/deploy/05-create_filesystem_table.sql @@ -8,7 +8,7 @@ SET timezone = 'Australia/Sydney'; CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; /* MetaData */ -CREATE TABLE metadata ( +CREATE TABLE IF NOT EXISTS metadata ( MetadataID uuid PRIMARY KEY DEFAULT uuid_generate_v4(), CreatedAt TIMESTAMP NOT NULL DEFAULT NOW() ); @@ -16,7 +16,7 @@ CREATE TABLE metadata ( /** The filesystem table models all file heirachies in our system **/ -CREATE TABLE filesystem ( +CREATE TABLE IF NOT EXISTS filesystem ( EntityID uuid PRIMARY KEY DEFAULT uuid_generate_v4(), LogicalName VARCHAR(50) NOT NULL, From a04c13ae2c15e2fa02f17fcbed4674468f1359f2 Mon Sep 17 00:00:00 2001 From: Westley Date: Fri, 6 Oct 2023 19:10:26 +1100 Subject: [PATCH 3/4] Attempted reverting some changes --- backend/Dockerfile | 28 +- backend/docs/WritingTests.md | 254 ++-- backend/endpoints/tests/volume_test.go | 224 ++-- docker-compose.yml | 194 +-- 
frontend/.dockerignore | 14 +- frontend/Dockerfile | 54 +- .../CreateCodeBlock_button/index.ts | 2 +- .../src/cse-ui-kit/assets/bold-button.svg | 32 +- .../cse-ui-kit/assets/centeralign-button.svg | 110 +- .../src/cse-ui-kit/assets/italics-button.svg | 80 +- .../assets/leftrightalign-button.svg | 110 +- .../cse-ui-kit/assets/underline-button.svg | 96 +- .../src/cse-ui-kit/assets/upload-content.svg | 100 +- utilities/createUsers.sh | 0 .../.paket/Paket.Restore.targets | 1114 ++++++++--------- utilities/ghost-exporter/Main.fs | 48 +- 16 files changed, 1230 insertions(+), 1230 deletions(-) mode change 100644 => 100755 utilities/createUsers.sh diff --git a/backend/Dockerfile b/backend/Dockerfile index 09258ea23..0197710ea 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -1,15 +1,15 @@ -FROM golang:1.19-alpine as app-builder -WORKDIR /go/src/app -COPY . . -RUN apk add git -# Static build required so that we can safely copy the binary over. -# `-tags timetzdata` embeds zone info from the "time/tzdata" package. -RUN CGO_ENABLED=0 go install -ldflags '-extldflags "-static"' -tags timetzdata - -FROM scratch -# the test program: -COPY --from=app-builder /go/bin/cms.csesoc.unsw.edu.au /cms.csesoc.unsw.edu.au -# the tls certificates: -# NB: this pulls directly from the upstream image, which already has ca-certificates: -COPY --from=alpine:latest /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ +FROM golang:1.19-alpine as app-builder +WORKDIR /go/src/app +COPY . . +RUN apk add git +# Static build required so that we can safely copy the binary over. +# `-tags timetzdata` embeds zone info from the "time/tzdata" package. 
+RUN CGO_ENABLED=0 go install -ldflags '-extldflags "-static"' -tags timetzdata + +FROM scratch +# the test program: +COPY --from=app-builder /go/bin/cms.csesoc.unsw.edu.au /cms.csesoc.unsw.edu.au +# the tls certificates: +# NB: this pulls directly from the upstream image, which already has ca-certificates: +COPY --from=alpine:latest /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ ENTRYPOINT ["/cms.csesoc.unsw.edu.au"] \ No newline at end of file diff --git a/backend/docs/WritingTests.md b/backend/docs/WritingTests.md index 6e3522a55..b51ff0428 100644 --- a/backend/docs/WritingTests.md +++ b/backend/docs/WritingTests.md @@ -1,128 +1,128 @@ -# Writing Tests -The following is just a small guide on the process of unit testing within our codebase. Most if not all of our unit tests are written using a combination of `gomock` (Go's mocking framework) and Go's inbuilt testing engine `go test`. - -## Using Go Test -Go has a really nice and clean testing library in the form of `testing`. Testing is usually done on a package/file level, that is for each file/package we have an appropriate set of tests. To mark a file as a test file we simply add the suffix `_test` to the end of it, eg. `concurrency.go` would have the corresponding test file `concurrency_test.go`. 
Generally test files look something like -```go -package math - -import ( - "testing" - // a nice library we use to make assertations a lot cleaner - "github.com/stretchr/testify/assert" -) - -// Tests must be marked with the "Test" prefix, this tells go that the following method is a test -// under the hood what actually happens is that go test is a code generation tool, this code generation tool generates a -// main function that invokes all methods starting with "Test", it then compiles and runs this generated file -func TestAdd(t *testing.T) { - // The normal Go way - result := 1 + 2 - if result != 3 { - t.ErrorF("1 + 2 wasnt 3 :O") - } - - // Our way of writing this test - assert := assert.New(t) - assert.Equal(1 + 2, 3) -} -``` -Once you've written your test it can be run with: -```sh -# To run specifically this test -go test myTest_test.go -# To run all tests in the current directory and subdirectories -go test ./... -``` - -## Project Specific Quirks -Theres some weird quirks when it comes to writing project specific tests, this includes stuff such as interface mocking and writing database bound tests. - -### Database Bound Tests -Generally it is preffered if your tests do not touch an actual database (see the section on mocking for how to acheive this) but sometimes it is just unavoidable, eg. you may be writing a test in the `repository` package which is inherently database bound. To allow you to write such tests we require that database-bound tests are wrapped in a transaction, luckilly there is a convenient package (+ some really hacky code that should be refactored someday) that both enforces that requirement and helps you acheive it. - -#### Testing Contexts -We write database bound tests by using a `testing context`, a testing context refers to a connection to any database, this can be the live CMS database, your local version or a test database you spun up for testing. 
All contexts implement the [context interface](https://github.com/csesoc/website/blob/main/backend/database/contexts/main.go#L25), all database queries should be made via the context interface. - -There are two main implementations of the context interface, those are the `liveContext` and the `TestingContext`, the `liveContext` will panic whenever it is used within a test, so when writing tests make sure your pass a `testingContext` as an argument. The implementation that `TestingContext` exposes wraps every SQL query in a transaction and rolls it back upon completion, this gives us complete isolation between unit tests. We can write a simple database bound test as follows -```go -func TestRootInsert(t *testing.T) { - testContext := database.GetDatabaseContext().(*contexts.TestingContext) - - // to start a transaction (a test) we have to wrap the test in the runTest function, if we do not run our tests - // via this function then the context panics whenever you try and send a query (for good reason :P) - testContext.RunTest(func() { - // your test here - }) - - // after your test finishes the context rollsback the constructed transaction -} -``` - -As a side-note, don't go looking around the database package, its a bit of a mess 😔, I'll have a refactoring ticket created some day to just clean up that package. - -### Interface Mocking -As should have been rather evident from the last section, writing database bounds tests can be a bit of a pain, we need to wrap our tests in a transaction to try and ensure complete test isolation. There is a potential workaround however and that is via writing better tests 😛. 
Consider a simple endpoint function that we wish to unit test, this endpoint will depend on: - - A database connection - - To access the Filesystem, Groups and Users tables - - A connection to our published/unpublished volumes - - A log - -When writing tests for this endpoint the naive thing to do would be to create a testing context, spin up a connection to the published/unpublished volumes and pass that into your function and finally assert that it did what you wanted it to. The issue with this approach is that now you're not just testing your singular function but your ENTIRE system which leads to really slow tests and makes refactoring a pain (since you may have to update several unrelated tests). The smarter approach would be to use a method known as interface mocking, the idea behind interface mocking is that we provide a fake implementation of our interfaces to our functions, we then use that fake implementation to asser that the function did exactly what we wanted it to do. Within the CMS we use `gomock` to generate interface mocks, a general guide on interface mocking and `gomock` can be found [here](https://thedevelopercafe.com/articles/mocking-interfaces-in-go-with-gomock-670b1a640b00). Generally when writing tests the only mocks that will be of importance to you are the `dependency factory` and `repository` mocks, these can be found [here](https://github.com/csesoc/website/blob/main/backend/database/repositories/mocks/models_mock.go), to use them simply import that package into your file. 
- -An example of how we use interface mocking in practice can be seen below: -```go -func TestValidEntityInfo(t *testing.T) { - controller := gomock.NewController(t) - assert := assert.New(t) - defer controller.Finish() - - // ==== test setup ===== - entityID := uuid.New() - - // Constructing a fake filesystem repository mock - // note that we feed it a "fake" implementation, we're basically saying that whenever this fake function is called - // return this fake data we set it up with - mockFileRepo := repMocks.NewMockIFilesystemRepository(controller) - mockFileRepo.EXPECT().GetEntryWithID(entityID).Return(repositories.FilesystemEntry{ - EntityID: entityID, - LogicalName: "random name", - IsDocument: false, - ParentFileID: repositories.FilesystemRootID, - ChildrenIDs: []uuid.UUID{}, - }, nil).Times(1) - - // creates a dependecy factory mock - mockDepFactory := createMockDependencyFactory(controller, mockFileRepo, true) - - // ==== test execution ===== - form := models.ValidInfoRequest{EntityID: entityID} - response := endpoints.GetEntityInfo(form, mockDepFactory) - - assert.Equal(response.Status, http.StatusOK) - // and notice down here how we're asserting that the fake data was created - assert.Equal(response.Response, models.EntityInfoResponse{ - EntityID: entityID, - EntityName: "random name", - IsDocument: false, - Parent: repositories.FilesystemRootID, - Children: []models.EntityInfoResponse{}, - }) -} - - -// createMockDependencyFactory just constructs an instance of a dependency factory mock -func createMockDependencyFactory(controller *gomock.Controller, mockFileRepo *repMocks.MockIFilesystemRepository, needsLogger bool) *mocks.MockDependencyFactory { - mockDepFactory := mocks.NewMockDependencyFactory(controller) - mockDepFactory.EXPECT().GetFilesystemRepo().Return(mockFileRepo) - - if needsLogger { - log := logger.OpenLog("new log") - mockDepFactory.EXPECT().GetLogger().Return(log) - } - - return mockDepFactory -} -``` - +# Writing Tests +The following is 
just a small guide on the process of unit testing within our codebase. Most if not all of our unit tests are written using a combination of `gomock` (Go's mocking framework) and Go's inbuilt testing engine `go test`. + +## Using Go Test +Go has a really nice and clean testing library in the form of `testing`. Testing is usually done on a package/file level, that is for each file/package we have an appropriate set of tests. To mark a file as a test file we simply add the suffix `_test` to the end of it, eg. `concurrency.go` would have the corresponding test file `concurrency_test.go`. Generally test files look something like +```go +package math + +import ( + "testing" + // a nice library we use to make assertations a lot cleaner + "github.com/stretchr/testify/assert" +) + +// Tests must be marked with the "Test" prefix, this tells go that the following method is a test +// under the hood what actually happens is that go test is a code generation tool, this code generation tool generates a +// main function that invokes all methods starting with "Test", it then compiles and runs this generated file +func TestAdd(t *testing.T) { + // The normal Go way + result := 1 + 2 + if result != 3 { + t.ErrorF("1 + 2 wasnt 3 :O") + } + + // Our way of writing this test + assert := assert.New(t) + assert.Equal(1 + 2, 3) +} +``` +Once you've written your test it can be run with: +```sh +# To run specifically this test +go test myTest_test.go +# To run all tests in the current directory and subdirectories +go test ./... +``` + +## Project Specific Quirks +Theres some weird quirks when it comes to writing project specific tests, this includes stuff such as interface mocking and writing database bound tests. + +### Database Bound Tests +Generally it is preffered if your tests do not touch an actual database (see the section on mocking for how to acheive this) but sometimes it is just unavoidable, eg. 
you may be writing a test in the `repository` package which is inherently database bound. To allow you to write such tests we require that database-bound tests are wrapped in a transaction, luckilly there is a convenient package (+ some really hacky code that should be refactored someday) that both enforces that requirement and helps you acheive it. + +#### Testing Contexts +We write database bound tests by using a `testing context`, a testing context refers to a connection to any database, this can be the live CMS database, your local version or a test database you spun up for testing. All contexts implement the [context interface](https://github.com/csesoc/website/blob/main/backend/database/contexts/main.go#L25), all database queries should be made via the context interface. + +There are two main implementations of the context interface, those are the `liveContext` and the `TestingContext`, the `liveContext` will panic whenever it is used within a test, so when writing tests make sure your pass a `testingContext` as an argument. The implementation that `TestingContext` exposes wraps every SQL query in a transaction and rolls it back upon completion, this gives us complete isolation between unit tests. We can write a simple database bound test as follows +```go +func TestRootInsert(t *testing.T) { + testContext := database.GetDatabaseContext().(*contexts.TestingContext) + + // to start a transaction (a test) we have to wrap the test in the runTest function, if we do not run our tests + // via this function then the context panics whenever you try and send a query (for good reason :P) + testContext.RunTest(func() { + // your test here + }) + + // after your test finishes the context rollsback the constructed transaction +} +``` + +As a side-note, don't go looking around the database package, its a bit of a mess 😔, I'll have a refactoring ticket created some day to just clean up that package. 
+ +### Interface Mocking +As should have been rather evident from the last section, writing database bounds tests can be a bit of a pain, we need to wrap our tests in a transaction to try and ensure complete test isolation. There is a potential workaround however and that is via writing better tests 😛. Consider a simple endpoint function that we wish to unit test, this endpoint will depend on: + - A database connection + - To access the Filesystem, Groups and Users tables + - A connection to our published/unpublished volumes + - A log + +When writing tests for this endpoint the naive thing to do would be to create a testing context, spin up a connection to the published/unpublished volumes and pass that into your function and finally assert that it did what you wanted it to. The issue with this approach is that now you're not just testing your singular function but your ENTIRE system which leads to really slow tests and makes refactoring a pain (since you may have to update several unrelated tests). The smarter approach would be to use a method known as interface mocking, the idea behind interface mocking is that we provide a fake implementation of our interfaces to our functions, we then use that fake implementation to asser that the function did exactly what we wanted it to do. Within the CMS we use `gomock` to generate interface mocks, a general guide on interface mocking and `gomock` can be found [here](https://thedevelopercafe.com/articles/mocking-interfaces-in-go-with-gomock-670b1a640b00). Generally when writing tests the only mocks that will be of importance to you are the `dependency factory` and `repository` mocks, these can be found [here](https://github.com/csesoc/website/blob/main/backend/database/repositories/mocks/models_mock.go), to use them simply import that package into your file. 
+ +An example of how we use interface mocking in practice can be seen below: +```go +func TestValidEntityInfo(t *testing.T) { + controller := gomock.NewController(t) + assert := assert.New(t) + defer controller.Finish() + + // ==== test setup ===== + entityID := uuid.New() + + // Constructing a fake filesystem repository mock + // note that we feed it a "fake" implementation, we're basically saying that whenever this fake function is called + // return this fake data we set it up with + mockFileRepo := repMocks.NewMockIFilesystemRepository(controller) + mockFileRepo.EXPECT().GetEntryWithID(entityID).Return(repositories.FilesystemEntry{ + EntityID: entityID, + LogicalName: "random name", + IsDocument: false, + ParentFileID: repositories.FilesystemRootID, + ChildrenIDs: []uuid.UUID{}, + }, nil).Times(1) + + // creates a dependecy factory mock + mockDepFactory := createMockDependencyFactory(controller, mockFileRepo, true) + + // ==== test execution ===== + form := models.ValidInfoRequest{EntityID: entityID} + response := endpoints.GetEntityInfo(form, mockDepFactory) + + assert.Equal(response.Status, http.StatusOK) + // and notice down here how we're asserting that the fake data was created + assert.Equal(response.Response, models.EntityInfoResponse{ + EntityID: entityID, + EntityName: "random name", + IsDocument: false, + Parent: repositories.FilesystemRootID, + Children: []models.EntityInfoResponse{}, + }) +} + + +// createMockDependencyFactory just constructs an instance of a dependency factory mock +func createMockDependencyFactory(controller *gomock.Controller, mockFileRepo *repMocks.MockIFilesystemRepository, needsLogger bool) *mocks.MockDependencyFactory { + mockDepFactory := mocks.NewMockDependencyFactory(controller) + mockDepFactory.EXPECT().GetFilesystemRepo().Return(mockFileRepo) + + if needsLogger { + log := logger.OpenLog("new log") + mockDepFactory.EXPECT().GetLogger().Return(log) + } + + return mockDepFactory +} +``` + And thats it! 
That's how we do testing in the CMS. Our current test suite isn't particularly expansive and thats definitely something we're trying to improve at the moment before we move on to the next team. \ No newline at end of file diff --git a/backend/endpoints/tests/volume_test.go b/backend/endpoints/tests/volume_test.go index b1e427d17..65da3454e 100644 --- a/backend/endpoints/tests/volume_test.go +++ b/backend/endpoints/tests/volume_test.go @@ -1,112 +1,112 @@ -package tests - -import ( - "io/ioutil" - "net/http" - "os" - "testing" - - "cms.csesoc.unsw.edu.au/database/repositories" - repMocks "cms.csesoc.unsw.edu.au/database/repositories/mocks" - "cms.csesoc.unsw.edu.au/endpoints" - "cms.csesoc.unsw.edu.au/endpoints/models" - "github.com/golang/mock/gomock" - "github.com/google/uuid" - "github.com/stretchr/testify/assert" -) - -func TestUploadDocument(t *testing.T) { -} - -func TestGetPublishedDocument(t *testing.T) { - controller := gomock.NewController(t) - assert := assert.New(t) - defer controller.Finish() - - // ==== test setup ===== - entityID := uuid.New() - - tempFile, _ := ioutil.TempFile(os.TempDir(), "expected") - if _, err := tempFile.WriteString("hello world"); err != nil { - panic(err) - } - tempFile.Seek(0, 0) - defer os.Remove(tempFile.Name()) - - mockDockerFileSystemRepo := repMocks.NewMockIPublishedVolumeRepository(controller) - mockDockerFileSystemRepo.EXPECT().GetFromVolume(entityID.String()).Return(tempFile, nil).Times(1) - - mockDepFactory := createMockDependencyFactory(controller, nil, true) - mockDepFactory.EXPECT().GetPublishedVolumeRepo().Return(mockDockerFileSystemRepo) - - // // ==== test execution ===== - form := models.ValidGetPublishedDocumentRequest{DocumentID: entityID} - response := endpoints.GetPublishedDocument(form, mockDepFactory) - - assert.Equal(response.Status, http.StatusOK) - assert.Equal(response.Response, []byte("{\"Contents\": hello world}")) -} - -func TestUploadImage(t *testing.T) { - controller := gomock.NewController(t) - 
assert := assert.New(t) - defer controller.Finish() - - // ==== test setup ===== - entityID := uuid.New() - parentID := uuid.New() - entityToCreate := repositories.FilesystemEntry{ - LogicalName: "a.png", - ParentFileID: parentID, - IsDocument: false, - OwnerUserId: 1, - } - - mockFileRepo := repMocks.NewMockIFilesystemRepository(controller) - mockFileRepo.EXPECT().CreateEntry(entityToCreate).Return(repositories.FilesystemEntry{ - EntityID: entityID, - LogicalName: "a.png", - IsDocument: false, - ChildrenIDs: []uuid.UUID{}, - ParentFileID: parentID, - }, nil).Times(1) - - tempFile, _ := ioutil.TempFile(os.TempDir(), "expected") - defer os.Remove(tempFile.Name()) - - mockDockerFileSystemRepo := repMocks.NewMockIUnpublishedVolumeRepository(controller) - mockDockerFileSystemRepo.EXPECT().AddToVolume(entityID.String()).Return(nil).Times(1) - mockDockerFileSystemRepo.EXPECT().GetFromVolume(entityID.String()).Return(tempFile, nil).Times(1) - - mockDepFactory := createMockDependencyFactory(controller, mockFileRepo, true) - mockDepFactory.EXPECT().GetUnpublishedVolumeRepo().Return(mockDockerFileSystemRepo) - - // Create request - const pngBytes = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8/5+hHgAHggJ/PchI7wAAAABJRU5ErkJggg==" - garbageFile, _ := ioutil.TempFile(os.TempDir(), "input") - if _, err := garbageFile.WriteString(pngBytes); err != nil { - panic(err) - } - garbageFile.Seek(0, 0) - - defer os.Remove(garbageFile.Name()) - - form := models.ValidImageUploadRequest{ - Parent: parentID, - LogicalName: "a.png", - OwnerGroup: 1, - Image: garbageFile, - } - - // ==== test execution ===== - response := endpoints.UploadImage(form, mockDepFactory) - assert.Equal(response.Status, http.StatusOK) - assert.Equal(response.Response, models.NewEntityResponse{ - NewID: entityID, - }) - - // Assert that the file was written to - content, err := os.ReadFile(tempFile.Name()) - assert.Nil(err) - assert.Equal([]byte(pngBytes), content) -} +package tests + +import ( + 
"io/ioutil" + "net/http" + "os" + "testing" + + "cms.csesoc.unsw.edu.au/database/repositories" + repMocks "cms.csesoc.unsw.edu.au/database/repositories/mocks" + "cms.csesoc.unsw.edu.au/endpoints" + "cms.csesoc.unsw.edu.au/endpoints/models" + "github.com/golang/mock/gomock" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" +) + +func TestUploadDocument(t *testing.T) { +} + +func TestGetPublishedDocument(t *testing.T) { + controller := gomock.NewController(t) + assert := assert.New(t) + defer controller.Finish() + + // ==== test setup ===== + entityID := uuid.New() + + tempFile, _ := ioutil.TempFile(os.TempDir(), "expected") + if _, err := tempFile.WriteString("hello world"); err != nil { + panic(err) + } + tempFile.Seek(0, 0) + defer os.Remove(tempFile.Name()) + + mockDockerFileSystemRepo := repMocks.NewMockIPublishedVolumeRepository(controller) + mockDockerFileSystemRepo.EXPECT().GetFromVolume(entityID.String()).Return(tempFile, nil).Times(1) + + mockDepFactory := createMockDependencyFactory(controller, nil, true) + mockDepFactory.EXPECT().GetPublishedVolumeRepo().Return(mockDockerFileSystemRepo) + + // // ==== test execution ===== + form := models.ValidGetPublishedDocumentRequest{DocumentID: entityID} + response := endpoints.GetPublishedDocument(form, mockDepFactory) + + assert.Equal(response.Status, http.StatusOK) + assert.Equal(response.Response, []byte("{\"Contents\": hello world}")) +} + +func TestUploadImage(t *testing.T) { + controller := gomock.NewController(t) + assert := assert.New(t) + defer controller.Finish() + + // ==== test setup ===== + entityID := uuid.New() + parentID := uuid.New() + entityToCreate := repositories.FilesystemEntry{ + LogicalName: "a.png", + ParentFileID: parentID, + IsDocument: false, + OwnerUserId: 1, + } + + mockFileRepo := repMocks.NewMockIFilesystemRepository(controller) + mockFileRepo.EXPECT().CreateEntry(entityToCreate).Return(repositories.FilesystemEntry{ + EntityID: entityID, + LogicalName: "a.png", + 
IsDocument: false, + ChildrenIDs: []uuid.UUID{}, + ParentFileID: parentID, + }, nil).Times(1) + + tempFile, _ := ioutil.TempFile(os.TempDir(), "expected") + defer os.Remove(tempFile.Name()) + + mockDockerFileSystemRepo := repMocks.NewMockIUnpublishedVolumeRepository(controller) + mockDockerFileSystemRepo.EXPECT().AddToVolume(entityID.String()).Return(nil).Times(1) + mockDockerFileSystemRepo.EXPECT().GetFromVolume(entityID.String()).Return(tempFile, nil).Times(1) + + mockDepFactory := createMockDependencyFactory(controller, mockFileRepo, true) + mockDepFactory.EXPECT().GetUnpublishedVolumeRepo().Return(mockDockerFileSystemRepo) + + // Create request + const pngBytes = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8/5+hHgAHggJ/PchI7wAAAABJRU5ErkJggg==" + garbageFile, _ := ioutil.TempFile(os.TempDir(), "input") + if _, err := garbageFile.WriteString(pngBytes); err != nil { + panic(err) + } + garbageFile.Seek(0, 0) + + defer os.Remove(garbageFile.Name()) + + form := models.ValidImageUploadRequest{ + Parent: parentID, + LogicalName: "a.png", + OwnerGroup: 1, + Image: garbageFile, + } + + // ==== test execution ===== + response := endpoints.UploadImage(form, mockDepFactory) + assert.Equal(response.Status, http.StatusOK) + assert.Equal(response.Response, models.NewEntityResponse{ + NewID: entityID, + }) + + // Assert that the file was written to + content, err := os.ReadFile(tempFile.Name()) + assert.Nil(err) + assert.Equal([]byte(pngBytes), content) +} diff --git a/docker-compose.yml b/docker-compose.yml index b0b9574bd..1d9b712e4 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,98 +1,98 @@ -version: "3.8" - -services: - next: - container_name: next - build: - context: ./next - dockerfile: ./Dockerfile.development - volumes: - - './next:/next' - stdin_open: true - ports: - - 3001:3001 - - frontend: - container_name: frontend - build: - context: ./frontend - dockerfile: ./Dockerfile.development - volumes: - - './frontend:/app' - stdin_open: 
true - ports: - - 3000:3000 - - backend: - container_name: go_backend - build: - context: ./backend - dockerfile: ./Dockerfile.development - depends_on: - - migration - volumes: - - './backend:/go/src/cms.csesoc.unsw.edu.au' - - 'unpublished_document_data:/var/lib/documents/unpublished/data' - - 'published_document_data:/var/lib/documents/published/data' - ports: - - 8080:8080 - environment: - - FRONTEND_URI=${FRONTEND_URI} - - POSTGRES_USER=${PG_USER} - - POSTGRES_PASSWORD=${PG_PASSWORD} - - POSTGRES_DB=${PG_DB} - - POSTGRES_PORT=${PG_PORT} - - POSTGRES_HOST=${PG_HOST} - - db: - container_name: pg_container - image: postgres - restart: always - environment: - POSTGRES_USER: ${PG_USER} - POSTGRES_PASSWORD: ${PG_PASSWORD} - POSTGRES_DB: ${PG_DB} - ports: - - ${PG_PORT}:5432 - volumes: - - 'pg_data:/var/lib/postgresql/data' - - migration: - container_name: migration - build: - context: ./postgres - dockerfile: ./Dockerfile - depends_on: - - db - environment: - - POSTGRES_HOST=db - - POSTGRES_DB=${PG_DB} - - POSTGRES_USER=${PG_USER} - - POSTGRES_PASSWORD=${PG_PASSWORD} - - sqitch: - container_name: sqitch - build: - context: ./sqitch - dockerfile: ./Dockerfile - depends_on: - - db - - staging_db: - container_name: pg_container_testing - image: postgres - restart: always - user: postgres - environment: - POSTGRES_PASSWORD: test - POSTGRES_DB: cms_testing_db - ports: - - 1234:5432 - volumes: - - './postgres/up:/docker-entrypoint-initdb.d/' - - 'staging_pg_db:/var/lib/postgresql/data' -volumes: - pg_data: - staging_pg_db: - unpublished_document_data: +version: "3.8" + +services: + next: + container_name: next + build: + context: ./next + dockerfile: ./Dockerfile.development + volumes: + - './next:/next' + stdin_open: true + ports: + - 3001:3001 + + frontend: + container_name: frontend + build: + context: ./frontend + dockerfile: ./Dockerfile.development + volumes: + - './frontend:/app' + stdin_open: true + ports: + - 3000:3000 + + backend: + container_name: go_backend + 
build: + context: ./backend + dockerfile: ./Dockerfile.development + depends_on: + - migration + volumes: + - './backend:/go/src/cms.csesoc.unsw.edu.au' + - 'unpublished_document_data:/var/lib/documents/unpublished/data' + - 'published_document_data:/var/lib/documents/published/data' + ports: + - 8080:8080 + environment: + - FRONTEND_URI=${FRONTEND_URI} + - POSTGRES_USER=${PG_USER} + - POSTGRES_PASSWORD=${PG_PASSWORD} + - POSTGRES_DB=${PG_DB} + - POSTGRES_PORT=${PG_PORT} + - POSTGRES_HOST=${PG_HOST} + + db: + container_name: pg_container + image: postgres + restart: always + environment: + POSTGRES_USER: ${PG_USER} + POSTGRES_PASSWORD: ${PG_PASSWORD} + POSTGRES_DB: ${PG_DB} + ports: + - ${PG_PORT}:5432 + volumes: + - 'pg_data:/var/lib/postgresql/data' + + migration: + container_name: migration + build: + context: ./postgres + dockerfile: ./Dockerfile + depends_on: + - db + environment: + - POSTGRES_HOST=db + - POSTGRES_DB=${PG_DB} + - POSTGRES_USER=${PG_USER} + - POSTGRES_PASSWORD=${PG_PASSWORD} + + sqitch: + container_name: sqitch + build: + context: ./sqitch + dockerfile: ./Dockerfile + depends_on: + - db + + staging_db: + container_name: pg_container_testing + image: postgres + restart: always + user: postgres + environment: + POSTGRES_PASSWORD: test + POSTGRES_DB: cms_testing_db + ports: + - 1234:5432 + volumes: + - './postgres/up:/docker-entrypoint-initdb.d/' + - 'staging_pg_db:/var/lib/postgresql/data' +volumes: + pg_data: + staging_pg_db: + unpublished_document_data: published_document_data: \ No newline at end of file diff --git a/frontend/.dockerignore b/frontend/.dockerignore index 10f2a898e..28204dd16 100644 --- a/frontend/.dockerignore +++ b/frontend/.dockerignore @@ -1,7 +1,7 @@ -**/node_modules -**/npm-debug.log -build -.dockerignore -Dockerfile -Dockerfile.prod -Dockerfile.development +**/node_modules +**/npm-debug.log +build +.dockerignore +Dockerfile +Dockerfile.prod +Dockerfile.development diff --git a/frontend/Dockerfile b/frontend/Dockerfile 
index 914b5861a..ad3734b9d 100644 --- a/frontend/Dockerfile +++ b/frontend/Dockerfile @@ -1,27 +1,27 @@ -# Grab the latest Node base image -FROM node:20.2.0-alpine as builder - -# Set the current working directory inside the container -WORKDIR /app - -COPY package.json package-lock.json ./ -RUN npm install - -COPY . . - -RUN npm run build - -# nginx state for serving content -FROM nginx:1.23.1-alpine -COPY ./.nginx/nginx.conf /etc/nginx/nginx.conf -# Set working directory to nginx asset directory -WORKDIR /usr/share/nginx/html -# Remove default nginx static assets -RUN rm -rf ./* -# Copy static assets from builder stage -COPY --from=builder /app/build . - -EXPOSE 80 - -# Containers run nginx with global directives and daemon off -ENTRYPOINT ["nginx", "-g", "daemon off;"] +# Grab the latest Node base image +FROM node:20.2.0-alpine as builder + +# Set the current working directory inside the container +WORKDIR /app + +COPY package.json package-lock.json ./ +RUN npm install + +COPY . . + +RUN npm run build + +# nginx state for serving content +FROM nginx:1.23.1-alpine +COPY ./.nginx/nginx.conf /etc/nginx/nginx.conf +# Set working directory to nginx asset directory +WORKDIR /usr/share/nginx/html +# Remove default nginx static assets +RUN rm -rf ./* +# Copy static assets from builder stage +COPY --from=builder /app/build . 
+ +EXPOSE 80 + +# Containers run nginx with global directives and daemon off +ENTRYPOINT ["nginx", "-g", "daemon off;"] diff --git a/frontend/src/cse-ui-kit/CreateCodeBlock_button/index.ts b/frontend/src/cse-ui-kit/CreateCodeBlock_button/index.ts index e97f2cff5..715546f48 100644 --- a/frontend/src/cse-ui-kit/CreateCodeBlock_button/index.ts +++ b/frontend/src/cse-ui-kit/CreateCodeBlock_button/index.ts @@ -1,3 +1,3 @@ import CreateCodeBlock from './CreateCodeBlock'; -export default CreateCodeBlock; +export default CreateCodeBlock; \ No newline at end of file diff --git a/frontend/src/cse-ui-kit/assets/bold-button.svg b/frontend/src/cse-ui-kit/assets/bold-button.svg index f4d3570b8..f43e94fa4 100644 --- a/frontend/src/cse-ui-kit/assets/bold-button.svg +++ b/frontend/src/cse-ui-kit/assets/bold-button.svg @@ -1,16 +1,16 @@ - - - - - - - - + + + + + + + + diff --git a/frontend/src/cse-ui-kit/assets/centeralign-button.svg b/frontend/src/cse-ui-kit/assets/centeralign-button.svg index e8b6ae269..e4f9b2776 100644 --- a/frontend/src/cse-ui-kit/assets/centeralign-button.svg +++ b/frontend/src/cse-ui-kit/assets/centeralign-button.svg @@ -1,55 +1,55 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/frontend/src/cse-ui-kit/assets/italics-button.svg b/frontend/src/cse-ui-kit/assets/italics-button.svg index 9fbcf2e81..52d05023d 100644 --- a/frontend/src/cse-ui-kit/assets/italics-button.svg +++ b/frontend/src/cse-ui-kit/assets/italics-button.svg @@ -1,40 +1,40 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/frontend/src/cse-ui-kit/assets/leftrightalign-button.svg b/frontend/src/cse-ui-kit/assets/leftrightalign-button.svg index c0e5b038b..f87de30d9 100644 --- 
a/frontend/src/cse-ui-kit/assets/leftrightalign-button.svg +++ b/frontend/src/cse-ui-kit/assets/leftrightalign-button.svg @@ -1,55 +1,55 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/frontend/src/cse-ui-kit/assets/underline-button.svg b/frontend/src/cse-ui-kit/assets/underline-button.svg index ed7928f79..de03f9764 100644 --- a/frontend/src/cse-ui-kit/assets/underline-button.svg +++ b/frontend/src/cse-ui-kit/assets/underline-button.svg @@ -1,48 +1,48 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/frontend/src/cse-ui-kit/assets/upload-content.svg b/frontend/src/cse-ui-kit/assets/upload-content.svg index 365578008..73a16c64f 100644 --- a/frontend/src/cse-ui-kit/assets/upload-content.svg +++ b/frontend/src/cse-ui-kit/assets/upload-content.svg @@ -1,50 +1,50 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/utilities/createUsers.sh b/utilities/createUsers.sh old mode 100644 new mode 100755 diff --git a/utilities/ghost-exporter/.paket/Paket.Restore.targets b/utilities/ghost-exporter/.paket/Paket.Restore.targets index 4deb15bcd..e230bb215 100644 --- a/utilities/ghost-exporter/.paket/Paket.Restore.targets +++ b/utilities/ghost-exporter/.paket/Paket.Restore.targets @@ -1,557 +1,557 @@ - - - - - - - $(MSBuildAllProjects);$(MSBuildThisFileFullPath) - - $(MSBuildVersion) - 15.0.0 - false - true - - true - $(MSBuildThisFileDirectory) - $(MSBuildThisFileDirectory)..\ - $(PaketRootPath)paket-files\paket.restore.cached - $(PaketRootPath)paket.lock - classic - proj - assembly - native - 
/Library/Frameworks/Mono.framework/Commands/mono - mono - - - $(PaketRootPath)paket.bootstrapper.exe - $(PaketToolsPath)paket.bootstrapper.exe - $([System.IO.Path]::GetDirectoryName("$(PaketBootStrapperExePath)"))\ - - "$(PaketBootStrapperExePath)" - $(MonoPath) --runtime=v4.0.30319 "$(PaketBootStrapperExePath)" - - - - - true - true - - - True - - - False - - $(BaseIntermediateOutputPath.TrimEnd('\').TrimEnd('\/')) - - - - - - - - - $(PaketRootPath)paket - $(PaketToolsPath)paket - - - - - - $(PaketRootPath)paket.exe - $(PaketToolsPath)paket.exe - - - - - - <_DotnetToolsJson Condition="Exists('$(PaketRootPath)/.config/dotnet-tools.json')">$([System.IO.File]::ReadAllText("$(PaketRootPath)/.config/dotnet-tools.json")) - <_ConfigContainsPaket Condition=" '$(_DotnetToolsJson)' != ''">$(_DotnetToolsJson.Contains('"paket"')) - <_ConfigContainsPaket Condition=" '$(_ConfigContainsPaket)' == ''">false - - - - - - - - - - - <_PaketCommand>dotnet paket - - - - - - $(PaketToolsPath)paket - $(PaketBootStrapperExeDir)paket - - - paket - - - - - <_PaketExeExtension>$([System.IO.Path]::GetExtension("$(PaketExePath)")) - <_PaketCommand Condition=" '$(_PaketCommand)' == '' AND '$(_PaketExeExtension)' == '.dll' ">dotnet "$(PaketExePath)" - <_PaketCommand Condition=" '$(_PaketCommand)' == '' AND '$(OS)' != 'Windows_NT' AND '$(_PaketExeExtension)' == '.exe' ">$(MonoPath) --runtime=v4.0.30319 "$(PaketExePath)" - <_PaketCommand Condition=" '$(_PaketCommand)' == '' ">"$(PaketExePath)" - - - - - - - - - - - - - - - - - - - - - true - $(NoWarn);NU1603;NU1604;NU1605;NU1608 - false - true - - - - - - - - - $([System.IO.File]::ReadAllText('$(PaketRestoreCacheFile)')) - - - - - - - $([System.Text.RegularExpressions.Regex]::Split(`%(Identity)`, `": "`)[0].Replace(`"`, ``).Replace(` `, ``)) - $([System.Text.RegularExpressions.Regex]::Split(`%(Identity)`, `": "`)[1].Replace(`"`, ``).Replace(` `, ``)) - - - - - %(PaketRestoreCachedKeyValue.Value) - %(PaketRestoreCachedKeyValue.Value) - - - - - true 
- false - true - - - - - true - - - - - - - - - - - - - - - - - - - $(PaketIntermediateOutputPath)\$(MSBuildProjectFile).paket.references.cached - - $(MSBuildProjectFullPath).paket.references - - $(MSBuildProjectDirectory)\$(MSBuildProjectName).paket.references - - $(MSBuildProjectDirectory)\paket.references - - false - true - true - references-file-or-cache-not-found - - - - - $([System.IO.File]::ReadAllText('$(PaketReferencesCachedFilePath)')) - $([System.IO.File]::ReadAllText('$(PaketOriginalReferencesFilePath)')) - references-file - false - - - - - false - - - - - true - target-framework '$(TargetFramework)' or '$(TargetFrameworks)' files @(PaketResolvedFilePaths) - - - - - - - - - - - false - true - - - - - - - - - - - $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',').Length) - $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[0]) - $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[1]) - $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[4]) - $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[5]) - $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[6]) - $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[7]) - - - %(PaketReferencesFileLinesInfo.PackageVersion) - All - runtime - $(ExcludeAssets);contentFiles - $(ExcludeAssets);build;buildMultitargeting;buildTransitive - true - true - - - - - $(PaketIntermediateOutputPath)/$(MSBuildProjectFile).paket.clitools - - - - - - - - - $([System.String]::Copy('%(PaketCliToolFileLines.Identity)').Split(',')[0]) - $([System.String]::Copy('%(PaketCliToolFileLines.Identity)').Split(',')[1]) - - - %(PaketCliToolFileLinesInfo.PackageVersion) - - - - - - - - - - false - - - - - - <_NuspecFilesNewLocation Include="$(PaketIntermediateOutputPath)\$(Configuration)\*.nuspec"/> - - - - - - $(MSBuildProjectDirectory)/$(MSBuildProjectFile) - true - false - true - 
false - true - false - true - false - true - false - true - $(PaketIntermediateOutputPath)\$(Configuration) - $(PaketIntermediateOutputPath) - - - - <_NuspecFiles Include="$(AdjustedNuspecOutputPath)\*.$(PackageVersion.Split(`+`)[0]).nuspec"/> - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + $(MSBuildAllProjects);$(MSBuildThisFileFullPath) + + $(MSBuildVersion) + 15.0.0 + false + true + + true + $(MSBuildThisFileDirectory) + $(MSBuildThisFileDirectory)..\ + $(PaketRootPath)paket-files\paket.restore.cached + $(PaketRootPath)paket.lock + classic + proj + assembly + native + /Library/Frameworks/Mono.framework/Commands/mono + mono + + + $(PaketRootPath)paket.bootstrapper.exe + $(PaketToolsPath)paket.bootstrapper.exe + $([System.IO.Path]::GetDirectoryName("$(PaketBootStrapperExePath)"))\ + + "$(PaketBootStrapperExePath)" + $(MonoPath) --runtime=v4.0.30319 "$(PaketBootStrapperExePath)" + + + + + true + true + + + True + + + False + + $(BaseIntermediateOutputPath.TrimEnd('\').TrimEnd('\/')) + + + + + + + + + $(PaketRootPath)paket + $(PaketToolsPath)paket + + + + + + $(PaketRootPath)paket.exe + $(PaketToolsPath)paket.exe + + + + + + <_DotnetToolsJson Condition="Exists('$(PaketRootPath)/.config/dotnet-tools.json')">$([System.IO.File]::ReadAllText("$(PaketRootPath)/.config/dotnet-tools.json")) + <_ConfigContainsPaket Condition=" '$(_DotnetToolsJson)' != ''">$(_DotnetToolsJson.Contains('"paket"')) + <_ConfigContainsPaket Condition=" '$(_ConfigContainsPaket)' == ''">false + + + + + + + + + + + <_PaketCommand>dotnet paket + + + + + + $(PaketToolsPath)paket + $(PaketBootStrapperExeDir)paket + + + paket + + + + + <_PaketExeExtension>$([System.IO.Path]::GetExtension("$(PaketExePath)")) + <_PaketCommand Condition=" '$(_PaketCommand)' == '' AND '$(_PaketExeExtension)' == '.dll' ">dotnet "$(PaketExePath)" + <_PaketCommand Condition=" '$(_PaketCommand)' == '' AND '$(OS)' != 'Windows_NT' AND '$(_PaketExeExtension)' == '.exe' ">$(MonoPath) --runtime=v4.0.30319 
"$(PaketExePath)" + <_PaketCommand Condition=" '$(_PaketCommand)' == '' ">"$(PaketExePath)" + + + + + + + + + + + + + + + + + + + + + true + $(NoWarn);NU1603;NU1604;NU1605;NU1608 + false + true + + + + + + + + + $([System.IO.File]::ReadAllText('$(PaketRestoreCacheFile)')) + + + + + + + $([System.Text.RegularExpressions.Regex]::Split(`%(Identity)`, `": "`)[0].Replace(`"`, ``).Replace(` `, ``)) + $([System.Text.RegularExpressions.Regex]::Split(`%(Identity)`, `": "`)[1].Replace(`"`, ``).Replace(` `, ``)) + + + + + %(PaketRestoreCachedKeyValue.Value) + %(PaketRestoreCachedKeyValue.Value) + + + + + true + false + true + + + + + true + + + + + + + + + + + + + + + + + + + $(PaketIntermediateOutputPath)\$(MSBuildProjectFile).paket.references.cached + + $(MSBuildProjectFullPath).paket.references + + $(MSBuildProjectDirectory)\$(MSBuildProjectName).paket.references + + $(MSBuildProjectDirectory)\paket.references + + false + true + true + references-file-or-cache-not-found + + + + + $([System.IO.File]::ReadAllText('$(PaketReferencesCachedFilePath)')) + $([System.IO.File]::ReadAllText('$(PaketOriginalReferencesFilePath)')) + references-file + false + + + + + false + + + + + true + target-framework '$(TargetFramework)' or '$(TargetFrameworks)' files @(PaketResolvedFilePaths) + + + + + + + + + + + false + true + + + + + + + + + + + $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',').Length) + $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[0]) + $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[1]) + $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[4]) + $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[5]) + $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[6]) + $([System.String]::Copy('%(PaketReferencesFileLines.Identity)').Split(',')[7]) + + + %(PaketReferencesFileLinesInfo.PackageVersion) + All + runtime + 
$(ExcludeAssets);contentFiles + $(ExcludeAssets);build;buildMultitargeting;buildTransitive + true + true + + + + + $(PaketIntermediateOutputPath)/$(MSBuildProjectFile).paket.clitools + + + + + + + + + $([System.String]::Copy('%(PaketCliToolFileLines.Identity)').Split(',')[0]) + $([System.String]::Copy('%(PaketCliToolFileLines.Identity)').Split(',')[1]) + + + %(PaketCliToolFileLinesInfo.PackageVersion) + + + + + + + + + + false + + + + + + <_NuspecFilesNewLocation Include="$(PaketIntermediateOutputPath)\$(Configuration)\*.nuspec"/> + + + + + + $(MSBuildProjectDirectory)/$(MSBuildProjectFile) + true + false + true + false + true + false + true + false + true + false + true + $(PaketIntermediateOutputPath)\$(Configuration) + $(PaketIntermediateOutputPath) + + + + <_NuspecFiles Include="$(AdjustedNuspecOutputPath)\*.$(PackageVersion.Split(`+`)[0]).nuspec"/> + + + + + + + + + + + + + + + + + + + + + + + diff --git a/utilities/ghost-exporter/Main.fs b/utilities/ghost-exporter/Main.fs index e9a4da00f..33d9e0cd7 100644 --- a/utilities/ghost-exporter/Main.fs +++ b/utilities/ghost-exporter/Main.fs @@ -1,25 +1,25 @@ -module Main - -open GhostSyntax -open CMSSyntax -open Newtonsoft.Json - -open FSharpPlus -open Fleece.Newtonsoft -open System.IO -open Converters - -[] -let main args = - let ghostDocument = ofJsonText (File.ReadAllText args[0]) - - match Option.ofResult ghostDocument with - | Some document -> - // Write the result to disk - let compiledResult = toJsonText (GhostToCms document) - let fileName = Path.GetFileNameWithoutExtension (args[0]) - File.WriteAllText ($"{Path.GetFileNameWithoutExtension (args[0])}__exported.json", compiledResult) - - | _ -> printfn "%s" "failed to parse ghost document" - +module Main + +open GhostSyntax +open CMSSyntax +open Newtonsoft.Json + +open FSharpPlus +open Fleece.Newtonsoft +open System.IO +open Converters + +[] +let main args = + let ghostDocument = ofJsonText (File.ReadAllText args[0]) + + match Option.ofResult ghostDocument with 
+ | Some document -> + // Write the result to disk + let compiledResult = toJsonText (GhostToCms document) + let fileName = Path.GetFileNameWithoutExtension (args[0]) + File.WriteAllText ($"{Path.GetFileNameWithoutExtension (args[0])}__exported.json", compiledResult) + + | _ -> printfn "%s" "failed to parse ghost document" + 0 \ No newline at end of file From 2fd988432cdb1a5be08be4940becf49890ba10bb Mon Sep 17 00:00:00 2001 From: Westley Date: Sat, 7 Oct 2023 17:00:29 +1100 Subject: [PATCH 4/4] Added a README.md for the sqitch folder --- sqitch/README.md | 96 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 96 insertions(+) create mode 100644 sqitch/README.md diff --git a/sqitch/README.md b/sqitch/README.md new file mode 100644 index 000000000..5d38868f8 --- /dev/null +++ b/sqitch/README.md @@ -0,0 +1,96 @@ +# Sqitch +Sqitch is an open source database change management tool, basically like git but specifically for databases. + +First, make sure you have run `make dev-build`, so that the sqitch container is running. +Then cd into the `sqitch` folder.
+As a sanity check, type `./sqitch help` into the command line and check that the following pops up: +``` +Usage + sqitch [--etc-path | --help | --man | --version] + sqitch [--chdir ] [--no-pager] [--quiet] [--verbose] + [] [] + +Common Commands + The most commonly used sqitch commands are: + + add Add a new change to the plan + bundle Bundle a Sqitch project for distribution + checkout Revert, checkout another VCS branch, and re-deploy changes + config Get and set local, user, or system options + deploy Deploy changes to a database + engine Manage database engine configuration + help Display help information about Sqitch commands + init Initialize a project + log Show change logs for a database + plan Show the contents of a plan + rebase Revert and redeploy database changes + revert Revert changes from a database + rework Duplicate a change in the plan and revise its scripts + show Show information about changes and tags, or change script contents + status Show the current deployment status of a database + tag Add or list tags in the plan + target Manage target database configuration + upgrade Upgrade the registry to the current version + verify Verify changes to a database + + See "sqitch help " or "sqitch help " to read about a + specific command or concept. See "sqitch help --guide" for a list of + conceptual guides. +``` + +## User config +Much like git, it would be great if we can tell who made a certain change to the database. +To do that, simply run the following commands. +`sqitch config --user user.name ''` +`sqitch config --user user.email ''` + +## Adding a new SQL script to the database +To add a new change to the database (like a new SQL script, table or function), first run +`./sqitch add appschema -n ""` + +You will see the following changes: +1. New SQL files will be made in the `/deploy`, `/revert` and `/verify` folders. +2. The `sqitch.plan` file will be appended with your new change and message.
+ +The `/deploy` folder contains all the SQL scripts that will be run when we deploy the database. +The `/revert` folder contains all the SQL scripts that will be run when we need to revert the database to some previous state. (Usually just contains a bunch of DROP TABLE expressions) +The `/verify` folder contains all the SQL scripts to verify that a deploy did what it's supposed to. + +## Deploying changes +To deploy changes you made to the database simply type in: +`./sqitch deploy test` + +This will cause sqitch to run all the SQL scripts inside your `/deploy` folder in the order they are created. + +## Revert changes +To revert changes you made to a database, type +`./sqitch revert test --to @HEAD^` + +The `@HEAD` always points to *the last change deployed to the database* +The `^` appended tells Sqitch to select the change prior to the last deployed change. +You can add more `^` to go back further. +You can also use `@ROOT` which refers to the first change deployed to the database. + +## Verify changes +(Use this if you actually fill out the scripts inside of the `/verify` folder) + +Run `./sqitch verify test` to run verification checks. + +## Sqitch.conf +This file contains all the settings that sqitch will follow, you don't need to touch anything here. +The only thing of note is the +``` +[target "test"] + uri = db:pg://postgres:postgres@localhost:5432/test_db +``` + +This tells sqitch that `test` refers to a specific database URI (which is the current one we are using) +So you don't have to type the actual URI itself if you want to use sqitch, you just type `test` instead. + +## The .bat and .sh files +Don't remove them or touch them. +The sqitch docker container needs them in order for you to interact with sqitch from the command line. + +## References +Refer to the official Sqitch page for more detailed documentation. +https://sqitch.org/ \ No newline at end of file