Merge remote-tracking branch 'origin/main' into lee/fullstack
leehuwuj committed Nov 18, 2024
2 parents 875f1ff + 4663dec commit 60f2c2f
Showing 10 changed files with 19 additions and 193 deletions.
5 changes: 5 additions & 0 deletions .changeset/pink-maps-joke.md
@@ -0,0 +1,5 @@
+---
+"create-llama": patch
+---
+
+chore: bump react19 rc
4 changes: 0 additions & 4 deletions e2e/utils.ts
@@ -90,8 +90,6 @@ export async function runCreateLlama({
     ...dataSourceArgs,
     "--vector-db",
     vectorDb,
-    "--open-ai-key",
-    process.env.OPENAI_API_KEY,
     "--use-pnpm",
     "--port",
     port,
@@ -103,8 +101,6 @@ export async function runCreateLlama({
     tools ?? "none",
     "--observability",
     "none",
-    "--llama-cloud-key",
-    process.env.LLAMA_CLOUD_API_KEY,
   ];
 
   if (templateUI) {
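With the "--open-ai-key" and "--llama-cloud-key" flags removed, the e2e helper relies on the CI detection introduced below in questions/index.ts: prompts are skipped in CI, and credentials are read from the environment. A rough sketch of how a run could be wired under that assumption; the actual spawn logic in e2e/utils.ts may differ.

```ts
// Sketch only: the flag names come from the diff above; everything else is illustrative.
import { spawn } from "node:child_process";

const commandArgs = ["--vector-db", "none", "--use-pnpm", "--port", "3000", "--observability", "none"];

// OPENAI_API_KEY / LLAMA_CLOUD_API_KEY stay in the environment instead of being
// passed on the command line; PLAYWRIGHT_TEST=1 triggers the new isCI short-circuit.
const child = spawn("npx", ["create-llama@latest", "test-app", ...commandArgs], {
  env: { ...process.env, PLAYWRIGHT_TEST: "1" },
  stdio: "inherit",
});

child.on("exit", (code) => console.log(`create-llama exited with code ${code}`));
```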
3 changes: 2 additions & 1 deletion helpers/providers/openai.ts
@@ -3,6 +3,7 @@ import ora from "ora";
 import { red } from "picocolors";
 import prompts from "prompts";
 import { ModelConfigParams, ModelConfigQuestionsParams } from ".";
+import { isCI } from "../../questions";
 import { questionHandlers } from "../../questions/utils";
 
 const OPENAI_API_URL = "https://api.openai.com/v1";
@@ -30,7 +31,7 @@ export async function askOpenAIQuestions({
     },
   };
 
-  if (!config.apiKey) {
+  if (!config.apiKey && !isCI) {
     const { key } = await prompts(
       {
         type: "text",
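Since the hunk above is truncated, here is a minimal sketch of the guarded prompt pattern this change introduces: when a key is already configured, or the process runs under CI/Playwright, the interactive question is skipped entirely. Only the import path and the `!config.apiKey && !isCI` condition come from the diff; the prompt wording and the environment fallback are illustrative.

```ts
import prompts from "prompts";
import { isCI } from "../../questions";

// Sketch, not the repository's exact code.
async function resolveOpenAIKey(configuredKey?: string): Promise<string | undefined> {
  // In CI there is no TTY to answer the question, so never block on a prompt;
  // fall back to whatever the environment provides.
  if (configuredKey || isCI) {
    return configuredKey ?? process.env.OPENAI_API_KEY;
  }
  const { key } = await prompts({
    type: "text",
    name: "key",
    message: "Please provide your OpenAI API key:",
  });
  return key || process.env.OPENAI_API_KEY;
}
```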
4 changes: 3 additions & 1 deletion questions/index.ts
@@ -4,10 +4,12 @@ import { askProQuestions } from "./questions";
 import { askSimpleQuestions } from "./simple";
 import { QuestionArgs, QuestionResults } from "./types";
 
+export const isCI = ciInfo.isCI || process.env.PLAYWRIGHT_TEST === "1";
+
 export const askQuestions = async (
   args: QuestionArgs,
 ): Promise<QuestionResults> => {
-  if (ciInfo.isCI || process.env.PLAYWRIGHT_TEST === "1") {
+  if (isCI) {
     return await getCIQuestionResults(args);
   } else if (args.pro) {
     // TODO: refactor pro questions to return a result object
3 changes: 2 additions & 1 deletion questions/questions.ts
@@ -1,5 +1,6 @@
 import { blue } from "picocolors";
 import prompts from "prompts";
+import { isCI } from ".";
 import { COMMUNITY_OWNER, COMMUNITY_REPO } from "../helpers/constant";
 import { EXAMPLE_FILE } from "../helpers/datasources";
 import { getAvailableLlamapackOptions } from "../helpers/llama-pack";
@@ -379,7 +380,7 @@ export const askProQuestions = async (program: QuestionArgs) => {
 
   // Ask for LlamaCloud API key when using a LlamaCloud index or LlamaParse
   if (isUsingLlamaCloud || program.useLlamaParse) {
-    if (!program.llamaCloudKey) {
+    if (!program.llamaCloudKey && !isCI) {
       // if already set, don't ask again
       // Ask for LlamaCloud API key
       const { llamaCloudKey } = await prompts(
6 changes: 2 additions & 4 deletions templates/types/streaming/fastapi/app/services/file.py
@@ -242,13 +242,11 @@ def _add_file_to_llama_cloud_index(
     except ImportError as e:
         raise ValueError("LlamaCloudFileService is not found") from e
 
-    project_id = index._get_project_id()
-    pipeline_id = index._get_pipeline_id()
     # LlamaCloudIndex is a managed index so we can directly use the files
     upload_file = (file_name, BytesIO(file_data))
     doc_id = LLamaCloudFileService.add_file_to_pipeline(
-        project_id,
-        pipeline_id,
+        index.project.id,
+        index.pipeline.id,
         upload_file,
         custom_metadata={},
     )
118 changes: 0 additions & 118 deletions templates/types/streaming/nextjs/app/components/ui/drawer.tsx

This file was deleted.

29 changes: 0 additions & 29 deletions templates/types/streaming/nextjs/app/components/ui/hover-card.tsx

This file was deleted.

27 changes: 0 additions & 27 deletions templates/types/streaming/nextjs/app/components/ui/progress.tsx

This file was deleted.

13 changes: 5 additions & 8 deletions templates/types/streaming/nextjs/package.json
@@ -13,13 +13,11 @@
     "@apidevtools/swagger-parser": "^10.1.0",
     "@e2b/code-interpreter": "0.0.9-beta.3",
     "@radix-ui/react-collapsible": "^1.0.3",
-    "@radix-ui/react-hover-card": "^1.0.7",
-    "@radix-ui/react-progress": "^1.1.0",
     "@radix-ui/react-select": "^2.1.1",
     "@radix-ui/react-slot": "^1.0.2",
     "@radix-ui/react-tabs": "^1.1.0",
-    "@llamaindex/chat-ui": "0.0.7",
-    "ai": "3.3.42",
+    "@llamaindex/chat-ui": "0.0.9",
+    "ai": "3.4.33",
     "ajv": "^8.12.0",
     "class-variance-authority": "^0.7.0",
     "clsx": "^2.1.1",
@@ -28,16 +26,15 @@
     "formdata-node": "^6.0.3",
     "got": "^14.4.1",
     "llamaindex": "0.8.2",
-    "lucide-react": "^0.294.0",
+    "lucide-react": "^0.460.0",
     "next": "^15.0.3",
-    "react": "^18.2.0",
-    "react-dom": "^18.2.0",
+    "react": "19.0.0-rc-66855b96-20241106",
+    "react-dom": "19.0.0-rc-66855b96-20241106",
     "papaparse": "^5.4.1",
     "supports-color": "^8.1.1",
     "tailwind-merge": "^2.1.0",
     "tiktoken": "^1.0.15",
     "uuid": "^9.0.1",
-    "vaul": "^0.9.1",
     "marked": "^14.1.2"
   },
   "devDependencies": {
