Skip to content

Commit

Permalink
Merge pull request #49 from robotboy655/regression-fix2
Browse files Browse the repository at this point in the history
Regression fixes
  • Loading branch information
luttje authored Feb 18, 2024
2 parents b443e9f + 58c5226 commit 19ee5b3
Show file tree
Hide file tree
Showing 2 changed files with 24 additions and 13 deletions.
33 changes: 21 additions & 12 deletions src/api-writer/glua-api-writer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -38,12 +38,17 @@ export const RESERVERD_KEYWORDS = new Set([
'while'
]);

/**
 * A scraped wiki page paired with the order index it was queued under
 * (see `writePages`). `makeApiFromPages` sorts by this index so output
 * order is deterministic even when scrape callbacks finish out of order.
 */
type IndexedWikiPage = {
index: number;
page: WikiPage;
};

export class GluaApiWriter {
private readonly writtenClasses: Set<string> = new Set();
private readonly writtenLibraryGlobals: Set<string> = new Set();
private readonly pageOverrides: Map<string, string> = new Map();

private readonly files: Map<string, WikiPage[]> = new Map();
private readonly files: Map<string, IndexedWikiPage[]> = new Map();

constructor() { }

Expand Down Expand Up @@ -274,32 +279,36 @@ export class GluaApiWriter {
return api;
}

/**
 * Queues pages to be written to the given output file, tagging each one
 * with the caller-supplied scrape-order index so `makeApiFromPages` can
 * restore a deterministic order later.
 *
 * @param pages    Scraped wiki pages destined for `filePath`.
 * @param filePath Output file the pages belong to.
 * @param index    Scrape-order index for this batch (defaults to 0 for
 *                 callers that do not care about ordering).
 */
public writePages(pages: WikiPage[], filePath: string, index: number = 0) {
  if (!this.files.has(filePath)) this.files.set(filePath, []);

  // Hoist the bucket lookup out of the loop; `has` above guarantees it exists.
  const bucket = this.files.get(filePath)!;
  for (const page of pages) {
    bucket.push({ index, page });
  }
}

/**
 * Returns the pages queued for `filePath`, or an empty list when none
 * have been queued yet.
 */
public getPages(filePath: string) {
  const queued = this.files.get(filePath);
  return queued === undefined ? [] : queued;
}

/**
 * Builds the Lua API text for a set of queued wiki pages.
 *
 * Pages are emitted in two passes: "header" pages (classes, libraries,
 * panels) first, so their members in the second pass attach to an
 * already-declared type, with scrape-order restored via the index.
 *
 * @param pages Indexed pages previously queued with `writePages`.
 * @returns The concatenated API text for all pages.
 */
public makeApiFromPages(pages: IndexedWikiPage[]) {
  let api = "";

  // Sort a copy rather than mutating the caller's array in place.
  const ordered = [...pages].sort((a, b) => a.index - b.index);

  // Single predicate instead of duplicating the three checks in both passes.
  const isHeader = (page: WikiPage) =>
    isClass(page) || isLibrary(page) || isPanel(page);

  for (const { page } of ordered) {
    if (isHeader(page)) api += this.writePage(page);
  }

  for (const { page } of ordered) {
    if (!isHeader(page)) api += this.writePage(page);
  }

  return api;
}

public writeToDisk() {
this.files.forEach((pages: WikiPage[], filePath: string) => {
this.files.forEach((pages: IndexedWikiPage[], filePath: string) => {
let api = this.makeApiFromPages(pages);

if (api.length > 0) {
Expand Down
4 changes: 3 additions & 1 deletion src/cli-scraper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -90,10 +90,12 @@ async function startScrape() {
console.log('Scraping all pages...');
let scrape_start = performance.now();

let cur = 0;
let queue: Promise<any>[] = [];
for (const pageIndex of pageIndexes) {
const pageMarkupScraper = new WikiPageMarkupScraper(`${baseUrl}/${pageIndex.address}?format=text`);

const indexForThis = cur++;
pageMarkupScraper.on('scraped', (url, pageMarkups) => {
if (pageMarkups.length === 0)
return;
Expand All @@ -118,7 +120,7 @@ async function startScrape() {
const moduleFile = path.join(baseDirectory, moduleName);

// Write Lua API docs
writer.writePages(pageMarkups, path.join(baseDirectory, `${moduleName}.lua`));
writer.writePages(pageMarkups, path.join(baseDirectory, `${moduleName}.lua`), indexForThis);

// Write JSON data
if (!fs.existsSync(moduleFile))
Expand Down

0 comments on commit 19ee5b3

Please sign in to comment.