diff --git a/routes_test.go b/routes_test.go
index 58aa2e7..f51a7ba 100644
--- a/routes_test.go
+++ b/routes_test.go
@@ -10,6 +10,15 @@ import (
 	"strings"
 )
 
+// Define some custom types to make response parsing easier
+type SearchResponse struct {
+	SpotifyID  string
+	Name       string
+	Popularity int
+	Genres     []string
+}
+type SearchResponseList []SearchResponse
+
 func TestAlive(t *testing.T) {
 	db := setupTestDatabase("testalive")
 	env := &Env{db: db}
@@ -84,14 +93,7 @@ func TestGetArtistByName(t *testing.T) {
 
 	assert.Equal(t, 200, w.Code)
 
-	// Define some custom types to make response parsing easier
-	type SearchResponse struct {
-		SpotifyID string
-		Name      string
-	}
-	type ResponseList []SearchResponse
-
-	var resp ResponseList
+	var resp SearchResponseList
 	err := json.NewDecoder(w.Body).Decode(&resp)
 	if err != nil {
 		assert.Fail(t, fmt.Sprintf("Could not validate and parse JSON response: %s", err.Error()))
@@ -176,3 +178,73 @@ func TestCreateGenres(t *testing.T) {
 
 	assert.Equal(t, "testgenre", genre.Name)
 }
+
+func TestGetPaginatedGenres(t *testing.T) {
+	db := setupTestDatabase("testgetpaginatedgenres")
+	env := &Env{db: db}
+	router := setupRouter(env, spotifyClientID, spotifyClientSecret)
+
+	// Create a large list of fake genres to test paging
+	var testData []Genre
+	for i := 0; i < 100; i++ {
+		testData = append(testData, Genre{Name: fmt.Sprintf("test%d", i)})
+	}
+
+	dbResult := env.db.Create(&testData)
+	if dbResult.Error != nil {
+		assert.Fail(t, "Failed to save test info into database")
+	}
+
+	w := httptest.NewRecorder()
+	req, _ := http.NewRequest("GET", "/genres?page=3&page_size=12", nil)
+	router.ServeHTTP(w, req)
+
+	assert.Equal(t, 200, w.Code)
+	assert.Equal(t, `["test24","test25","test26","test27","test28","test29","test30","test31","test32","test33","test34","test35"]`, w.Body.String())
+}
+
+func TestGetPaginatedArtistByName(t *testing.T) {
+	db := setupTestDatabase("testgetpaginatedartistbyname")
+	env := &Env{db: db}
+	router := 
setupRouter(env, spotifyClientID, spotifyClientSecret)
+
+	// Create a large list of fake artist profiles to test paging
+	var testData []ArtistProfile
+	for i := 0; i < 100; i++ {
+		testName := fmt.Sprintf("test%d", i)
+		testArtist := ArtistProfile{
+			Name:       testName,
+			SpotifyID:  testName,
+			Popularity: i,
+			Genres:     []Genre{{Name: testName}},
+		}
+		testData = append(testData, testArtist)
+	}
+
+	dbResult := env.db.Create(&testData)
+	if dbResult.Error != nil {
+		assert.Fail(t, "Failed to save test info into database")
+	}
+
+	w := httptest.NewRecorder()
+	req, _ := http.NewRequest("GET", "/artists?name=test&page=15&page_size=2", nil)
+	router.ServeHTTP(w, req)
+
+	assert.Equal(t, 200, w.Code)
+	var resp SearchResponseList
+	err := json.NewDecoder(w.Body).Decode(&resp)
+	if err != nil {
+		assert.Fail(t, fmt.Sprintf("Could not validate and parse JSON response: %s", err.Error()))
+	}
+
+	assert.Equal(t, "test28", resp[0].SpotifyID)
+	assert.Equal(t, "test28", resp[0].Name)
+	assert.Equal(t, "test28", resp[0].Genres[0])
+	assert.Equal(t, 28, resp[0].Popularity)
+
+	// Test end of the returned data is correct too
+	assert.Equal(t, "test29", resp[1].SpotifyID)
+	assert.Equal(t, "test29", resp[1].Name)
+	assert.Equal(t, "test29", resp[1].Genres[0])
+	assert.Equal(t, 29, resp[1].Popularity)
+}