feat: switch to llama on ovh
Some checks failed
Deploy / lint-build-deploy (push) Failing after 7s

This commit is contained in:
2026-01-19 20:06:09 +01:00
parent 4453fb7943
commit ef333ae7f2
10 changed files with 10465 additions and 805 deletions

12
.dockerignore Normal file
View File

@@ -0,0 +1,12 @@
Dockerfile
.dockerignore
node_modules
npm-debug.log
.git
.gitignore
.next
.env
.env.*
!.env.example
*.md
.husky

View File

@@ -1,11 +1,4 @@
NEXT_PUBLIC_CONTACT_EMAIL= NEXT_PUBLIC_CONTACT_EMAIL=
POSTGRES_DATABASE= OVHCLOUD_API_KEY=
POSTGRES_HOST= PURCHASE_REFLECTION_THRESHOLD=50
POSTGRES_PASSWORD= NUMBER_OF_WEEKS=4
POSTGRES_PRISMA_URL=
POSTGRES_URL=
POSTGRES_URL_NON_POOLING=
POSTGRES_URL_NO_SSL=
POSTGRES_USER=
PURCHASE_REFLECTION_THRESHOLD=
NUMBER_OF_WEEKS=

View File

@@ -0,0 +1,46 @@
# CI/CD workflow: lint, build, and deploy to the OVH VPS on every push to main.
name: Deploy
on:
  push:
    branches: [ main ]
jobs:
  lint-build-deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '20'
      # Prefer the reproducible `npm ci` when a lockfile is committed.
      - name: Install dependencies
        run: |
          if [ -f package-lock.json ]; then
            npm ci
          else
            npm install
          fi
      - name: Run linting
        run: npm run lint
      - name: Build
        run: npm run build
      # Pulls the latest code on the VPS, then rebuilds the app container via
      # the docker-compose project that lives in the gitea directory.
      # NOTE(review): host IP and both paths are hard-coded — assumes the repo
      # is already cloned at /home/debian/synthetic-consumer-data; confirm.
      - name: Deploy to VPS
        env:
          SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
        run: |
          mkdir -p ~/.ssh
          echo "$SSH_PRIVATE_KEY" > ~/.ssh/id_ed25519
          chmod 600 ~/.ssh/id_ed25519
          # Pre-trust the VPS host key so the non-interactive ssh below succeeds.
          ssh-keyscan -H 51.210.247.57 >> ~/.ssh/known_hosts
          ssh debian@51.210.247.57 << 'EOF'
          cd /home/debian/synthetic-consumer-data
          git pull origin main
          cd /home/debian/gitea
          docker-compose up -d --build synthetic-consumer-data
          EOF

64
Dockerfile Normal file
View File

@@ -0,0 +1,64 @@
# Multi-stage build for a Next.js app using the 'standalone' output mode
# (see next.config.ts). Final image runs as an unprivileged user.
FROM node:20-alpine AS base

# Install dependencies only when needed
FROM base AS deps
# libc6-compat: glibc shim some prebuilt npm native binaries expect on Alpine/musl.
RUN apk add --no-cache libc6-compat
WORKDIR /app

# Install dependencies
# Copy whichever lockfile exists; the '*' globs tolerate missing files.
COPY package.json yarn.lock* package-lock.json* pnpm-lock.yaml* ./
RUN \
  if [ -f yarn.lock ]; then yarn --frozen-lockfile; \
  elif [ -f package-lock.json ]; then npm ci; \
  elif [ -f pnpm-lock.yaml ]; then corepack enable pnpm && pnpm i --frozen-lockfile; \
  else echo "Lockfile not found." && exit 1; \
  fi

# Rebuild the source code only when needed
FROM base AS builder
WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules
COPY . .

# Build arguments for environment variables needed at build time
# (NEXT_PUBLIC_* values are inlined into the client bundle during `next build`).
ARG NEXT_PUBLIC_CONTACT_EMAIL
ENV NEXT_PUBLIC_CONTACT_EMAIL=$NEXT_PUBLIC_CONTACT_EMAIL

# Next.js telemetry
ENV NEXT_TELEMETRY_DISABLED=1

# Build with whichever package manager matches the lockfile copied above.
RUN \
  if [ -f yarn.lock ]; then yarn run build; \
  elif [ -f package-lock.json ]; then npm run build; \
  elif [ -f pnpm-lock.yaml ]; then corepack enable pnpm && pnpm run build; \
  else echo "Lockfile not found." && exit 1; \
  fi

# Production image, copy all the files and run next
FROM base AS runner
WORKDIR /app

ENV NODE_ENV=production
ENV NEXT_TELEMETRY_DISABLED=1

# Dedicated non-root user/group so the server never runs as root.
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 nextjs

COPY --from=builder /app/public ./public

# Set the correct permission for prerender cache
RUN mkdir .next
RUN chown nextjs:nodejs .next

# Automatically leverage output traces to reduce image size
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static

USER nextjs

EXPOSE 3000
ENV PORT=3000
ENV HOSTNAME="0.0.0.0"

# 'standalone' output emits server.js at the app root.
CMD ["node", "server.js"]

View File

@@ -1,6 +1,5 @@
import type { Metadata } from 'next'; import type { Metadata } from 'next';
import './globals.css'; import './globals.css';
import { Analytics } from '@vercel/analytics/next';
export const metadata: Metadata = { export const metadata: Metadata = {
title: 'Synthetic Consumers Data Generator', title: 'Synthetic Consumers Data Generator',
@@ -16,7 +15,6 @@ export default function RootLayout({
return ( return (
<html lang='en'> <html lang='en'>
<body>{children}</body> <body>{children}</body>
<Analytics />
</html> </html>
); );
} }

7
next.config.ts Normal file
View File

@@ -0,0 +1,7 @@
import type { NextConfig } from 'next';

/**
 * Next.js build configuration.
 *
 * `output: 'standalone'` makes `next build` emit a self-contained server
 * bundle (consumed by the Dockerfile's runner stage via `node server.js`).
 * `satisfies` validates the shape against NextConfig while keeping the
 * literal type of the object.
 */
const nextConfig = {
  output: 'standalone'
} satisfies NextConfig;

export default nextConfig;

9426
package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -16,10 +16,9 @@
"vercel:env": "vercel env pull .env" "vercel:env": "vercel env pull .env"
}, },
"dependencies": { "dependencies": {
"@vercel/analytics": "^1.5.0",
"ai": "^5.0.68",
"axios": "^1.12.0", "axios": "^1.12.0",
"class-variance-authority": "^0.7.1", "class-variance-authority": "^0.7.1",
"openai": "^4.77.0",
"clsx": "^2.1.1", "clsx": "^2.1.1",
"crypto": "^1.0.1", "crypto": "^1.0.1",
"dotenv": "^16.4.5", "dotenv": "^16.4.5",
@@ -34,7 +33,6 @@
"devDependencies": { "devDependencies": {
"@commitlint/cli": "^18.4.3", "@commitlint/cli": "^18.4.3",
"@commitlint/config-conventional": "^18.4.3", "@commitlint/config-conventional": "^18.4.3",
"@types/json-schema": "^7.0.15",
"@types/node": "^22.10.1", "@types/node": "^22.10.1",
"@types/react": "^18.3.12", "@types/react": "^18.3.12",
"@types/react-dom": "^18.3.1", "@types/react-dom": "^18.3.1",

View File

@@ -1,6 +1,10 @@
import 'dotenv/config'; import 'dotenv/config';
import { generateText, tool, jsonSchema } from 'ai'; import OpenAI from 'openai';
import type { JSONSchema7 } from 'json-schema';
const ovhAI = new OpenAI({
apiKey: process.env.OVHCLOUD_API_KEY,
baseURL: 'https://oai.endpoints.kepler.ai.cloud.ovh.net/v1'
});
export interface BaseTool { export interface BaseTool {
readonly name: string; readonly name: string;
@@ -24,43 +28,61 @@ export async function makeRequest<T extends BaseTool>(
toolDef: T toolDef: T
): Promise<Record<string, unknown>> { ): Promise<Record<string, unknown>> {
try { try {
const { steps } = await generateText({ const completion = await ovhAI.chat.completions.create({
model: 'anthropic/claude-sonnet-4.5', model: 'Meta-Llama-3_3-70B-Instruct',
temperature: 1, temperature: 1,
tools: { max_tokens: 16000,
[toolDef.name]: tool({ tools: [
{
type: 'function',
function: {
name: toolDef.name,
description: toolDef.input_schema.description || '', description: toolDef.input_schema.description || '',
inputSchema: jsonSchema(toolDef.input_schema as JSONSchema7), parameters: {
execute: async args => args type: 'object',
}) properties: toolDef.input_schema.properties,
required: toolDef.input_schema.required
? [...toolDef.input_schema.required]
: undefined
}
}
}
],
tool_choice: {
type: 'function',
function: { name: toolDef.name }
}, },
toolChoice: { messages: [
type: 'tool', {
toolName: toolDef.name role: 'system',
content:
'You are a data generation assistant. Generate realistic, diverse synthetic data. You must respond ONLY with the function call. Do not include any text outside the function call.'
}, },
prompt {
role: 'user',
content: prompt
}
]
}); });
const toolCalls = steps.flatMap(step => step.toolCalls); const message = completion.choices[0]?.message;
if (!toolCalls || toolCalls.length === 0) { if (!message?.tool_calls || message.tool_calls.length === 0) {
throw new Error('No tool calls found in response'); throw new Error('No function call found in response');
} }
const typedCall = toolCalls[0] as unknown as { const toolCall = message.tool_calls[0];
toolName: string;
input: Record<string, unknown>;
};
if (typedCall.toolName !== toolDef.name) { if (toolCall.function.name !== toolDef.name) {
throw new Error( throw new Error(
`Expected tool ${toolDef.name} but got ${typedCall.toolName}` `Expected tool ${toolDef.name} but got ${toolCall.function.name}`
); );
} }
return typedCall.input; const result = JSON.parse(toolCall.function.arguments);
return result;
} catch (error) { } catch (error) {
console.error('Error making request:', error); console.error('Error making request:', error);
throw Error('Vercel AI Gateway client error.'); throw Error('OVH AI Endpoints client error.');
} }
} }

1622
yarn.lock

File diff suppressed because it is too large Load Diff