Skip to content

Commit 121c0e9

Browse files
committed
test
1 parent f35705d commit 121c0e9

1 file changed

Lines changed: 57 additions & 30 deletions

File tree

.github/workflows/check-models.yml

Lines changed: 57 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -31,43 +31,70 @@ jobs:
3131
const puppeteer = require('puppeteer');
3232
3333
(async () => {
  // Scrapes the Copilot supported-models page and prints the model names
  // as a JSON array on stdout. All diagnostics go to stderr so callers can
  // capture stdout cleanly.
  let browser;
  try {
    browser = await puppeteer.launch({
      headless: 'new',
      args: ['--no-sandbox', '--disable-setuid-sandbox'],
    });
    const page = await browser.newPage();

    console.error('Navigating to page...');
    await page.goto('https://docs.github.com/en/copilot/reference/ai-models/supported-models', {
      waitUntil: 'networkidle0',
      timeout: 60000,
    });

    console.error('Waiting for content to load...');
    // page.waitForTimeout() was removed in Puppeteer v22; waiting for the
    // actual table rows is also more reliable than a fixed 5s sleep.
    await page.waitForSelector('table tbody tr', { timeout: 30000 });

    // NOTE: the evaluate() callback runs inside the browser, so console.*
    // there never reaches this process. Return diagnostics instead of
    // logging them from browser context.
    const { modelNames, tableCount, rowCounts } = await page.evaluate(() => {
      const names = [];
      const tables = document.querySelectorAll('table');
      const counts = [];

      tables.forEach((table) => {
        const rows = table.querySelectorAll('tbody tr');
        counts.push(rows.length);

        rows.forEach((row) => {
          const cells = row.querySelectorAll('td');
          if (cells.length > 0) {
            // Model name lives in the first cell of each row.
            const text = cells[0].textContent.trim();
            if (text.length > 0) {
              names.push(text);
            }
          }
        });
      });

      // Remove duplicates (the page repeats models across plan tables).
      return { modelNames: [...new Set(names)], tableCount: tables.length, rowCounts: counts };
    });

    console.error(`Found ${tableCount} tables (rows per table: ${rowCounts.join(', ')})`);
    console.error(`Extracted ${modelNames.length} unique models`);
    console.log(JSON.stringify(modelNames));
  } catch (error) {
    console.error('Error:', error.message);
    process.exitCode = 1;
  } finally {
    // Always release the browser, even on failure — otherwise the CI
    // runner leaks a Chromium process.
    await browser?.close();
  }
})();
6288
SCRAPE_EOF
6389
6490
# Run the scraper. The script writes diagnostics to stderr (visible in the
# workflow log) and ONLY the JSON array to stdout, so a plain command
# substitution captures exactly the JSON. Merging stderr into the capture
# and taking the last line would be fragile: any late stderr output would
# clobber the JSON.
if ! MODELS_JSON=$(node scrape.js); then
  echo "Model scrape failed" >&2
  exit 1
fi
echo "Scraped models JSON: $MODELS_JSON"

# Store the models, one per line
echo "$MODELS_JSON" | jq -r '.[]' > models.txt
echo "Models extracted:"
cat models.txt
7198
7299
- name: List available models from GitHub Models API
73100
id: list_models

0 commit comments

Comments
 (0)