Merge branch '22-integrate-api-and-frontend' into 36-refactor-dataset-class
commit 91a365fdd3
@@ -1,10 +1,13 @@
package de.uni_passau.fim.PADAS.group3.DataDash.model;
package de.uni_passau.fim.PADAS.group3.DataDash.Dataset;

import java.net.URL;
import java.time.LocalDate;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;

import de.uni_passau.fim.PADAS.group3.DataDash.category.Category;

import java.sql.Date;

import jakarta.persistence.Entity;
@@ -43,12 +46,14 @@ public class Dataset {

    private URL url;

    private String licence;

    private static final List<String> sortable = Arrays.asList("author", "title", "upvotes", "date");

    @ManyToOne
    private Category categorie;

    public Dataset(String title, String abst, String description, String author, URL url, Category categories, Type type) {
    public Dataset(String title, String abst, String description, String author, URL url, Category categories, Type type, String licence) {

        this.raiting = 0;
        this.votes = 0;
@@ -61,6 +66,7 @@ public class Dataset {
        setCategorie(categories);
        setType(type);
        setUrl(url);
        setLicence(licence);
    }

    public Dataset() {
@@ -115,6 +121,10 @@ public class Dataset {
        return url;
    }

    public String getLicence() {
        return licence;
    }

    public static List<String> getSort() {
        return sortable;
    }
@@ -163,4 +173,8 @@ public class Dataset {
        upvotes--;
    }

    public void setLicence(String licence) {
        this.licence = licence;
    }

}
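Not part of the diff above, but for context: with the widened constructor, callers supply the licence as the final argument and read it back through getLicence(). A minimal sketch, assuming the Category(String name) constructor used elsewhere in this commit and imports of Dataset, Type, and Category from their new packages:

// Sketch only: exercising the new licence parameter of Dataset.
import java.net.URL;

public class DatasetLicenceExample {
    public static void main(String[] args) throws Exception {
        URL url = new URL("http://example.com/1");
        Category category = new Category("Science");   // assumed Category(String name) constructor
        Dataset dataset = new Dataset("Title 1", "Abstract 1", "Description 1",
                "Author 1", url, category, Type.API, "MIT");
        System.out.println(dataset.getLicence());        // prints "MIT"
    }
}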
@@ -1,15 +1,9 @@
package de.uni_passau.fim.PADAS.group3.DataDash.controler;
package de.uni_passau.fim.PADAS.group3.DataDash.Dataset;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.web.bind.annotation.*;

import de.uni_passau.fim.PADAS.group3.DataDash.model.Category;
import de.uni_passau.fim.PADAS.group3.DataDash.model.Dataset;
import de.uni_passau.fim.PADAS.group3.DataDash.model.DatasetService;
import de.uni_passau.fim.PADAS.group3.DataDash.model.Type;

import org.springframework.data.domain.Pageable;
import org.springframework.data.web.config.EnableSpringDataWebSupport;
import org.springframework.http.HttpStatus;
@@ -20,6 +14,8 @@ import java.util.UUID;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;

import de.uni_passau.fim.PADAS.group3.DataDash.category.Category;

@RestController
@RequestMapping("/api/v1/datasets")
@EnableSpringDataWebSupport
@@ -1,4 +1,4 @@
package de.uni_passau.fim.PADAS.group3.DataDash.model;
package de.uni_passau.fim.PADAS.group3.DataDash.Dataset;

import java.time.LocalDate;
import java.util.List;
@@ -7,6 +7,10 @@ import java.util.UUID;

import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;

import de.uni_passau.fim.PADAS.group3.DataDash.category.Category;
import de.uni_passau.fim.PADAS.group3.DataDash.category.CategoryRepository;

import org.springframework.data.domain.Page;

@Service
@@ -0,0 +1,6 @@
package de.uni_passau.fim.PADAS.group3.DataDash.Dataset;

public enum Type {
    DATASET,
    API
}
@@ -1,4 +1,4 @@
package de.uni_passau.fim.PADAS.group3.DataDash.model;
package de.uni_passau.fim.PADAS.group3.DataDash.Dataset;

import java.util.List;
import java.util.Optional;
@@ -11,6 +11,8 @@ import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;

import de.uni_passau.fim.PADAS.group3.DataDash.category.Category;

public interface dataRepository extends JpaRepository<Dataset, UUID> {

    Dataset getDatasetById(UUID id);
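Also not part of the diff: a rough sketch of how the getDatasetById finder above might be called from a service bean. The class and method names here are hypothetical; only the repository interface and finder come from the hunk above.

// Sketch only: hypothetical lookup helper around the dataRepository finder.
import java.util.UUID;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

@Service
public class DatasetLookupExample {

    @Autowired
    private dataRepository repository;   // interface shown in the hunk above

    public Dataset findById(UUID id) {
        // Spring Data derives the query from the method name; returns null when no dataset matches
        return repository.getDatasetById(id);
    }
}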
@@ -1,10 +1,11 @@
package de.uni_passau.fim.PADAS.group3.DataDash.model;
package de.uni_passau.fim.PADAS.group3.DataDash.category;

import java.util.List;
import java.util.UUID;

import org.springframework.context.annotation.Lazy;

import de.uni_passau.fim.PADAS.group3.DataDash.Dataset.Dataset;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
@@ -1,14 +1,10 @@
package de.uni_passau.fim.PADAS.group3.DataDash.controler;
package de.uni_passau.fim.PADAS.group3.DataDash.category;

import java.util.List;
import java.util.UUID;

import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;

import de.uni_passau.fim.PADAS.group3.DataDash.model.CategoryDto;
import de.uni_passau.fim.PADAS.group3.DataDash.model.CategoryService;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
@@ -1,4 +1,4 @@
package de.uni_passau.fim.PADAS.group3.DataDash.model;
package de.uni_passau.fim.PADAS.group3.DataDash.category;

import java.util.UUID;
@@ -1,4 +1,4 @@
package de.uni_passau.fim.PADAS.group3.DataDash.model;
package de.uni_passau.fim.PADAS.group3.DataDash.category;

public class CategoryDtoMapper {
@@ -1,4 +1,4 @@
package de.uni_passau.fim.PADAS.group3.DataDash.model;
package de.uni_passau.fim.PADAS.group3.DataDash.category;

import java.util.List;
import java.util.Optional;
@@ -1,4 +1,4 @@
package de.uni_passau.fim.PADAS.group3.DataDash.model;
package de.uni_passau.fim.PADAS.group3.DataDash.category;

import org.springframework.stereotype.Service;
import java.util.List;
@@ -1,12 +0,0 @@
package de.uni_passau.fim.PADAS.group3.DataDash.controler;

import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;

@Controller
public class PageController {
    @GetMapping("/add")
    public String getAddPage() {
        return "add";
    }
}
@@ -1,40 +0,0 @@
package de.uni_passau.fim.PADAS.group3.DataDash.model;

import java.util.List;
import java.util.Random;

import org.slf4j.LoggerFactory;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class LoadDummyDatabase {

    private static final org.slf4j.Logger log = LoggerFactory.getLogger(LoadDummyDatabase.class);

    //@Bean
    CommandLineRunner initDatabase(dataRepository repository, CategoryRepository categoryRepository) {

        return args -> {
            for (int i = 0; i < 100; i++) {
                Category category = new Category("Category" + i);
                log.info("Preloading" + categoryRepository.save(category));

                Dataset dataset = new Dataset("Title" + i, "Abst" + i, "Description" + i, "Author" + i, null, category, Type.API);
                for (int j = 0; j < new Random().nextInt(50); j++) {
                    dataset.upvote();
                }
                log.info("Preloading" + repository.save(dataset));
                log.info("Preloading" + categoryRepository.save(category));
            }
            List<Dataset> s = repository.findByTitleLike("%Title%");
            log.info("Found Entry with ID: " + s.get(1).getId());};
    }

}
@@ -1,6 +0,0 @@
package de.uni_passau.fim.PADAS.group3.DataDash.model;

public enum Type {
    DATASET,
    API
}
@@ -10,21 +10,21 @@ INSERT INTO category (id, name) VALUES
('123e4567-e89b-12d3-a456-426614174002', 'Health');

-- Insert sample data into dataset
INSERT INTO dataset (date, raiting, upvotes, votes, categorie_id, id, abst, author, description, title, url, type) VALUES
('2023-01-01', 4.5, 100, 120, '123e4567-e89b-12d3-a456-426614174000', '123e4567-e89b-12d3-a456-426614174100', 'Abstract 1', 'Author 1', 'Description 1', 'Title 1', 'http://example.com/1', 'API'),
('2023-01-02', 4.7, 150, 170, '123e4567-e89b-12d3-a456-426614174001', '123e4567-e89b-12d3-a456-426614174101', 'Abstract 2', 'Author 2', 'Description 2', 'Title 2', 'http://example.com/2', 'DATASET'),
('2023-01-03', 4.9, 200, 220, '123e4567-e89b-12d3-a456-426614174002', '123e4567-e89b-12d3-a456-426614174102', 'Abstract 3', 'Author 3', 'Description 3', 'Title 3', 'http://example.com/3', 'API'),
('2023-01-04', 4.2, 80, 100, '123e4567-e89b-12d3-a456-426614174003', '123e4567-e89b-12d3-a456-426614174103', 'Abstract 4', 'Author 4', 'Description 4', 'Title 4', 'http://example.com/4', 'DATASET'),
('2023-01-05', 4.6, 120, 140, '123e4567-e89b-12d3-a456-426614174004', '123e4567-e89b-12d3-a456-426614174104', 'Abstract 5', 'Author 5', 'Description 5', 'Title 5', 'http://example.com/5', 'API');
INSERT INTO dataset (date, raiting, upvotes, votes, categorie_id, id, abst, author, description, title, url, type, licence) VALUES
('2023-01-01', 4.5, 100, 120, '123e4567-e89b-12d3-a456-426614174000', '123e4567-e89b-12d3-a456-426614174100', 'Abstract 1', 'Author 1', 'Description 1', 'Title 1', 'http://example.com/1', 'API', 'MIT'),
('2023-01-02', 4.7, 150, 170, '123e4567-e89b-12d3-a456-426614174001', '123e4567-e89b-12d3-a456-426614174101', 'Abstract 2', 'Author 2', 'Description 2', 'Title 2', 'http://example.com/2', 'DATASET', 'MIT'),
('2023-01-03', 4.9, 200, 220, '123e4567-e89b-12d3-a456-426614174002', '123e4567-e89b-12d3-a456-426614174102', 'Abstract 3', 'Author 3', 'Description 3', 'Title 3', 'http://example.com/3', 'API', 'MIT'),
('2023-01-04', 4.2, 80, 100, '123e4567-e89b-12d3-a456-426614174003', '123e4567-e89b-12d3-a456-426614174103', 'Abstract 4', 'Author 4', 'Description 4', 'Title 4', 'http://example.com/4', 'DATASET', 'MIT'),
('2023-01-05', 4.6, 120, 140, '123e4567-e89b-12d3-a456-426614174004', '123e4567-e89b-12d3-a456-426614174104', 'Abstract 5', 'Author 5', 'Description 5', 'Title 5', 'http://example.com/5', 'API', 'MIT');
-- Insert 10 more sample data into dataset
INSERT INTO dataset (date, raiting, upvotes, votes, categorie_id, id, abst, author, description, title, url, type) VALUES
('2023-01-06', 4.8, 180, 200, '123e4567-e89b-12d3-a456-426614174005', '123e4567-e89b-12d3-a456-426614174105', 'Abstract 6', 'Author 6', 'Description 6', 'Title 6', 'http://example.com/6', 'API'),
('2023-01-07', 4.3, 90, 110, '123e4567-e89b-12d3-a456-426614174006', '123e4567-e89b-12d3-a456-426614174106', 'Abstract 7', 'Author 7', 'Description 7', 'Title 7', 'http://example.com/7', 'DATASET'),
('2023-01-08', 4.7, 150, 170, '123e4567-e89b-12d3-a456-426614174007', '123e4567-e89b-12d3-a456-426614174107', 'Abstract 8', 'Author 8', 'Description 8', 'Title 8', 'http://example.com/8', 'API'),
('2023-01-09', 4.9, 200, 220, '123e4567-e89b-12d3-a456-426614174000', '123e4567-e89b-12d3-a456-426614174108', 'Abstract 9', 'Author 9', 'Description 9', 'Title 9', 'http://example.com/9', 'DATASET'),
('2023-01-10', 4.2, 80, 100, '123e4567-e89b-12d3-a456-426614174001', '123e4567-e89b-12d3-a456-426614174109', 'Abstract 10', 'Author 10', 'Description 10', 'Title 10', 'http://example.com/10', 'API'),
('2023-11-11', 4.6, 120, 140, '123e4567-e89b-12d3-a456-426614174002', '123e4567-e89b-12d3-a456-426614174110', 'Abstract 11', 'Author 11', 'Description 11', 'Title 11', 'http://example.com/11', 'DATASET'),
('2023-09-12', 4.8, 180, 200, '123e4567-e89b-12d3-a456-426614174003', '123e4567-e89b-12d3-a456-426614174111', 'Abstract 12', 'Author 12', 'Description 12', 'Title 12', 'http://example.com/12', 'API'),
('2023-03-13', 4.3, 90, 110, '123e4567-e89b-12d3-a456-426614174004', '123e4567-e89b-12d3-a456-426614174112', 'Abstract 13', 'Author 13', 'Description 13', 'Title 13', 'http://example.com/13', 'DATASET'),
('2021-01-14', 4.7, 150, 170, '123e4567-e89b-12d3-a456-426614174005', '123e4567-e89b-12d3-a456-426614174113', 'Abstract 14', 'Author 14', 'Description 14', 'Title 14', 'http://example.com/14', 'API'),
('2024-01-15', 4.9, 200, 220, '123e4567-e89b-12d3-a456-426614174006', '123e4567-e89b-12d3-a456-426614174114', 'Abstract 15', 'Author 15', 'Description 15', 'Title 15', 'http://example.com/15', 'DATASET');
INSERT INTO dataset (date, raiting, upvotes, votes, categorie_id, id, abst, author, description, title, url, type, licence) VALUES
('2023-01-06', 4.8, 180, 200, '123e4567-e89b-12d3-a456-426614174005', '123e4567-e89b-12d3-a456-426614174105', 'Abstract 6', 'Author 6', 'Description 6', 'Title 6', 'http://example.com/6', 'API', 'MIT'),
('2023-01-07', 4.3, 90, 110, '123e4567-e89b-12d3-a456-426614174006', '123e4567-e89b-12d3-a456-426614174106', 'Abstract 7', 'Author 7', 'Description 7', 'Title 7', 'http://example.com/7', 'DATASET', 'MIT'),
('2023-01-08', 4.7, 150, 170, '123e4567-e89b-12d3-a456-426614174007', '123e4567-e89b-12d3-a456-426614174107', 'Abstract 8', 'Author 8', 'Description 8', 'Title 8', 'http://example.com/8', 'API', 'MIT'),
('2023-01-09', 4.9, 200, 220, '123e4567-e89b-12d3-a456-426614174000', '123e4567-e89b-12d3-a456-426614174108', 'Abstract 9', 'Author 9', 'Description 9', 'Title 9', 'http://example.com/9', 'DATASET', 'MIT'),
('2023-01-10', 4.2, 80, 100, '123e4567-e89b-12d3-a456-426614174001', '123e4567-e89b-12d3-a456-426614174109', 'Abstract 10', 'Author 10', 'Description 10', 'Title 10', 'http://example.com/10', 'API', 'MIT'),
('2023-11-11', 4.6, 120, 140, '123e4567-e89b-12d3-a456-426614174002', '123e4567-e89b-12d3-a456-426614174110', 'Abstract 11', 'Author 11', 'Description 11', 'Title 11', 'http://example.com/11', 'DATASET', 'MIT'),
('2023-09-12', 4.8, 180, 200, '123e4567-e89b-12d3-a456-426614174003', '123e4567-e89b-12d3-a456-426614174111', 'Abstract 12', 'Author 12', 'Description 12', 'Title 12', 'http://example.com/12', 'API', 'MIT'),
('2023-03-13', 4.3, 90, 110, '123e4567-e89b-12d3-a456-426614174004', '123e4567-e89b-12d3-a456-426614174112', 'Abstract 13', 'Author 13', 'Description 13', 'Title 13', 'http://example.com/13', 'DATASET', 'MIT'),
('2021-01-14', 4.7, 150, 170, '123e4567-e89b-12d3-a456-426614174005', '123e4567-e89b-12d3-a456-426614174113', 'Abstract 14', 'Author 14', 'Description 14', 'Title 14', 'http://example.com/14', 'API', 'MIT'),
('2024-01-15', 4.9, 200, 220, '123e4567-e89b-12d3-a456-426614174006', '123e4567-e89b-12d3-a456-426614174114', 'Abstract 15', 'Author 15', 'Description 15', 'Title 15', 'http://example.com/15', 'DATASET', 'MIT');
@@ -3,5 +3,5 @@ DROP TABLE IF EXISTS category;

create table category (id uuid not null, name varchar(255), primary key (id));
create table dataset (date date, raiting float(24) not null, upvotes integer not null, votes integer not null, categorie_id uuid, id uuid not null, abst varchar(255), author varchar(255), description varchar(255), title varchar(255), url varchar(255), type enum ('API','DATASET'), primary key (id));
create table dataset (date date, raiting float(24) not null, upvotes integer not null, votes integer not null, categorie_id uuid, id uuid not null, abst varchar(255), author varchar(255), description varchar(255), title varchar(255), url varchar(255), type enum ('API','DATASET'), licence varchar(255), primary key (id));
alter table if exists dataset add constraint FKq6qwq6u473f89h71s7rf97ruy foreign key (categorie_id) references category;
@@ -20,10 +20,12 @@ function parseContent(content, clearResults) {

    if (content.length === 0) {
        nothingFoundElement.classList.remove("hidden");
        searchSection.querySelector(".datasets").classList.add("hidden");
    } else {
        nothingFoundElement.classList.add("hidden");

        const datasets = content.map(dataset => Dataset.get(dataset.id) ?? new Dataset(dataset));
        searchSection.querySelector(".datasets").classList.remove("hidden");
        if (clearResults) {
            Array.from(searchSection.querySelectorAll(".datasets .dataset")).forEach(e => e.remove());
        }
@@ -78,10 +78,6 @@ header {
    text-align: center;
}

.hidden {
    display: none;
}

#search-entry:focus-visible {
    outline: none;
}
@@ -93,6 +89,11 @@ header {
    gap: 1rem;
}

.hidden {
    display: none;
}

@container (width < 60ch) {
    .datasets {
        grid-template-columns: 1fr;
@@ -9,6 +9,7 @@ export const lastQuery = {
    totalPages: 0,
    currentPage: 0,
};
const loadedCategories = new Set;

// definition of all buttons & sections
const addButton = document.getElementById("add-btn");
@@ -33,22 +34,23 @@ addButton.addEventListener("click", () => {
});

filterButton.addEventListener("change", () => {
    const filterString = filterButton.value;
    if (filterString !== filterButton.querySelector("#default-filter").value) {
        fetchQuery(createQuery(), true);
    }
    fetchQuery(createQuery(), true);
});

filterButton.addEventListener("click", () => {
    fetchCategories();
})

searchButton.addEventListener("click", () => {
    fetchQuery(createQuery(), true);

});

searchBar.addEventListener("input", () => {
    updateSections();
    clearTimeout(searchBarTimeout);
    searchBarTimeout = setTimeout(() => {
        fetchQuery(createQuery(), true);
        updateSections();
    }, searchDelay);
});
@@ -71,7 +73,7 @@ resetButton.addEventListener("click", () => {

// functions of the main page
function navigateToAdd() {
    window.location.href = "/add"; //TODO: move to EventListner?
    window.location.href = "/add.html"; //TODO: move to EventListener?
}

function getFilterQuery() {
@@ -81,7 +83,7 @@ function getFilterQuery() {
    } else if (document.querySelector('#filter-btn option:checked').parentElement.label === "Standard categories") {
        return ["type", filterString];
    } else {
        return ["category", filterString];
        return ["category", filterButton.options[filterButton.selectedIndex].value]
    }
}
@@ -135,15 +137,18 @@ function updateSections() {

// fetches the further categories used in the filter function
function fetchCategories() {
    const fetchURL = new URL(
        "api/v1/categories", getBaseURL());
    const fetchURL = new URL("api/v1/categories", getBaseURL());
    fetch(fetchURL)
        .then(resp => resp.json())
        .then((data) => {
            for (let i = 0; i < data.length; i++) {
                let category = data[i].toLowerCase();
                category = category.charAt(0).toUpperCase() + category.slice(1);
                document.getElementById("other-categories").appendChild(new Option(category));
                let categoryName = data[i].name.toLowerCase();
                categoryName = categoryName.charAt(0).toUpperCase() + categoryName.slice(1);
                if (!loadedCategories.has(categoryName)) {
                    let newCategory = new Option(categoryName, data[i].id);
                    document.getElementById("other-categories").appendChild(newCategory);
                    loadedCategories.add(categoryName);
                }
            }
        });
}