Skip to content

Commit

Permalink
[Vertex AI] Update Citation decoding to handle optional startIndex
Browse files Browse the repository at this point in the history
  • Loading branch information
andrewheard authored and 1998code committed May 14, 2024
1 parent 1df22f8 commit 0a85432
Show file tree
Hide file tree
Showing 4 changed files with 72 additions and 32 deletions.
24 changes: 22 additions & 2 deletions FirebaseVertexAI/Sources/GenerateContentResponse.swift
Original file line number Diff line number Diff line change
Expand Up @@ -197,7 +197,7 @@ public struct CitationMetadata: Decodable {

/// A struct describing a source attribution.
@available(iOS 15.0, macOS 11.0, macCatalyst 15.0, *)
public struct Citation: Decodable {
public struct Citation {
/// The inclusive beginning of a sequence in a model response that derives from a cited source.
public let startIndex: Int

Expand All @@ -207,7 +207,7 @@ public struct Citation: Decodable {
/// A link to the cited source.
public let uri: String

/// The license the cited source work is distributed under.
/// The license the cited source work is distributed under, if specified.
public let license: String?
}

Expand Down Expand Up @@ -337,3 +337,23 @@ extension GenerateContentResponse.UsageMetadata: Decodable {
totalTokenCount = try container.decodeIfPresent(Int.self, forKey: .totalTokenCount) ?? 0
}
}

// MARK: - Codable Conformances

@available(iOS 15.0, macOS 11.0, macCatalyst 15.0, *)
extension Citation: Decodable {
  // Keys mirror the REST payload field names; the raw string value of each
  // case is identical to the name the compiler would synthesize, so wire
  // compatibility is unchanged.
  enum CodingKeys: String, CodingKey {
    case startIndex
    case endIndex
    case uri
    case license
  }

  /// Decodes a `Citation`, tolerating an absent `startIndex`.
  ///
  /// The backend omits `startIndex` when a citation begins at offset 0 of the
  /// response, so a missing key decodes as `0` instead of throwing.
  /// `license` is likewise optional; `endIndex` and `uri` remain required.
  public init(from decoder: any Decoder) throws {
    let values = try decoder.container(keyedBy: CodingKeys.self)
    startIndex = try values.decodeIfPresent(Int.self, forKey: .startIndex) ?? 0
    endIndex = try values.decode(Int.self, forKey: .endIndex)
    uri = try values.decode(String.self, forKey: .uri)
    license = try values.decodeIfPresent(String.self, forKey: .license)
  }
}
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
data: {"candidates": [{"content": {"role": "model","parts": [{"text": "Some information"}]}}]}
data: {"candidates": [{"content": {"role": "model","parts": [{"text": "Some information"}]},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE","probabilityScore": 0.043204036,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.082549304},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE","probabilityScore": 0.046291895,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.071461484},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE","probabilityScore": 0.100701615,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.06164962},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE","probabilityScore": 0.13150747,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.040087357}]}]}

data: {"candidates": [{"content": {"role": "model","parts": [{"text": " More information"}]},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE","probabilityScore": 0.06632687,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.03825006},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE","probabilityScore": 0.07477004,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.048767097},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE","probabilityScore": 0.13695431,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.059866417},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE","probabilityScore": 0.046119746,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.036425155}]}]}
data: {"candidates": [{"content": {"role": "model","parts": [{"text": " Some information cited from an external source"}]},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE","probabilityScore": 0.18982129,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.1337543},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE","probabilityScore": 0.13637818,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.021906368},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE","probabilityScore": 0.25404602,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.09073549},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE","probabilityScore": 0.24202643,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.06548521}],"citationMetadata": {"citations": [{"endIndex": 128,"uri": "https://www.example.com/citation-1"},{"startIndex": 130,"endIndex": 265,"uri": "https://www.example.com/citation-2"}]}}]}

data: {"candidates": [{"content": {"role": "model","parts": [{"text": " Some information cited from an external source"}]},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE","probabilityScore": 0.07850098,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.039416388},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE","probabilityScore": 0.08035747,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.04885778},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE","probabilityScore": 0.12273335,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.059646938},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE","probabilityScore": 0.053206205,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.04099903}],"citationMetadata": {"citations": [{"startIndex": 31,"endIndex": 187,"uri": "https://www.example.com/citation-1"}]}}]}
data: {"candidates": [{"content": {"role": "model","parts": [{"text": " More information cited from an external source"}]},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE","probabilityScore": 0.07850098,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.039416388},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE","probabilityScore": 0.08035747,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.04885778},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE","probabilityScore": 0.12273335,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.059646938},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE","probabilityScore": 0.053206205,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.04099903}],"citationMetadata": {"citations": [{"startIndex": 272,"endIndex": 431,"uri": "https://www.example.com/citation-3","license": "mit"}]}}]}

data: {"candidates": [{"content": {"role": "model","parts": [{"text": " More information cited from an external source"}]},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE","probabilityScore": 0.08803312,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.044183318},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE","probabilityScore": 0.094176665,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.0575992},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE","probabilityScore": 0.13660839,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.08035747},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE","probabilityScore": 0.060197048,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.046464667}],"citationMetadata": {"citations": [{"startIndex": 73,"endIndex": 248,"uri": "https://www.example.com/citation-2"},{"startIndex": 133,"endIndex": 272,"uri": "https://www.example.com/citation-3", "license": "mit"}]}}]}

data: {"candidates": [{"content": {"role": "model","parts": [{"text": " More information"}]},"finishReason": "STOP","safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE","probabilityScore": 0.12147716,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.0647717},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE","probabilityScore": 0.11858909,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.053899158},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE","probabilityScore": 0.14866412,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.08479541},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE","probabilityScore": 0.05470151,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.045015533}]}],"usageMetadata": {"promptTokenCount": 9,"candidatesTokenCount": 163,"totalTokenCount": 172}}
data: {"candidates": [{"content": {"role": "model","parts": [{"text": " More information "}]},"finishReason": "STOP","safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE","probabilityScore": 0.16013464,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.11716747},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE","probabilityScore": 0.10818896,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.021990221},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE","probabilityScore": 0.2158462,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.07682221},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE","probabilityScore": 0.19636348,"severity": "HARM_SEVERITY_NEGLIGIBLE","severityScore": 0.08021325}]}],"usageMetadata": {"promptTokenCount": 9,"candidatesTokenCount": 53,"totalTokenCount": 62}}
Original file line number Diff line number Diff line change
Expand Up @@ -14,46 +14,56 @@
{
"category": "HARM_CATEGORY_HATE_SPEECH",
"probability": "NEGLIGIBLE",
"probabilityScore": 0.16013464,
"probabilityScore": 0.16926852,
"severity": "HARM_SEVERITY_NEGLIGIBLE",
"severityScore": 0.074500255
"severityScore": 0.08181271
},
{
"category": "HARM_CATEGORY_DANGEROUS_CONTENT",
"probability": "NEGLIGIBLE",
"probabilityScore": 0.09687653,
"probabilityScore": 0.15636235,
"severity": "HARM_SEVERITY_NEGLIGIBLE",
"severityScore": 0.049313594
"severityScore": 0.02981654
},
{
"category": "HARM_CATEGORY_HARASSMENT",
"probability": "NEGLIGIBLE",
"probabilityScore": 0.16817278,
"probabilityScore": 0.33133608,
"severity": "HARM_SEVERITY_NEGLIGIBLE",
"severityScore": 0.09451043
"severityScore": 0.10875559
},
{
"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
"probability": "NEGLIGIBLE",
"probabilityScore": 0.05023736,
"probabilityScore": 0.17766814,
"severity": "HARM_SEVERITY_NEGLIGIBLE",
"severityScore": 0.034553625
"severityScore": 0.18787657
}
],
"citationMetadata": {
"citations": [
{
"startIndex": 179,
"endIndex": 366,
"uri": "https://www.example.com/some-citation"
"endIndex": 128,
"uri": "https://www.example.com/some-citation-1"
},
{
"startIndex": 130,
"endIndex": 265,
"uri": "https://www.example.com/some-citation-2"
},
{
"startIndex": 272,
"endIndex": 431,
"uri": "https://www.example.com/some-citation-3",
"license": "mit"
}
]
}
}
],
"usageMetadata": {
"promptTokenCount": 11,
"candidatesTokenCount": 135,
"totalTokenCount": 146
"promptTokenCount": 15,
"candidatesTokenCount": 253,
"totalTokenCount": 268
}
}
32 changes: 22 additions & 10 deletions FirebaseVertexAI/Tests/Unit/GenerativeModelTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -108,12 +108,22 @@ final class GenerativeModelTests: XCTestCase {
XCTAssertEqual(candidate.content.parts.count, 1)
XCTAssertEqual(response.text, "Some information cited from an external source")
let citationMetadata = try XCTUnwrap(candidate.citationMetadata)
XCTAssertEqual(citationMetadata.citationSources.count, 1)
let citationSource = try XCTUnwrap(citationMetadata.citationSources.first)
XCTAssertEqual(citationSource.uri, "https://www.example.com/some-citation")
XCTAssertEqual(citationSource.startIndex, 179)
XCTAssertEqual(citationSource.endIndex, 366)
XCTAssertNil(citationSource.license)
XCTAssertEqual(citationMetadata.citationSources.count, 3)
let citationSource1 = try XCTUnwrap(citationMetadata.citationSources[0])
XCTAssertEqual(citationSource1.uri, "https://www.example.com/some-citation-1")
XCTAssertEqual(citationSource1.startIndex, 0)
XCTAssertEqual(citationSource1.endIndex, 128)
XCTAssertNil(citationSource1.license)
let citationSource2 = try XCTUnwrap(citationMetadata.citationSources[1])
XCTAssertEqual(citationSource2.uri, "https://www.example.com/some-citation-2")
XCTAssertEqual(citationSource2.startIndex, 130)
XCTAssertEqual(citationSource2.endIndex, 265)
XCTAssertNil(citationSource2.license)
let citationSource3 = try XCTUnwrap(citationMetadata.citationSources[2])
XCTAssertEqual(citationSource3.uri, "https://www.example.com/some-citation-3")
XCTAssertEqual(citationSource3.startIndex, 272)
XCTAssertEqual(citationSource3.endIndex, 431)
XCTAssertEqual(citationSource3.license, "mit")
}

func testGenerateContent_success_quoteReply() async throws {
Expand Down Expand Up @@ -778,13 +788,15 @@ final class GenerativeModelTests: XCTestCase {
XCTAssertEqual(citations.count, 3)
XCTAssertTrue(citations
.contains(where: {
$0.startIndex == 31 && $0.endIndex == 187 && $0
.uri == "https://www.example.com/citation-1" && $0.license == nil
$0.startIndex == 0 && $0.endIndex == 128 && !$0.uri.isEmpty && $0.license == nil
}))
XCTAssertTrue(citations
.contains(where: {
$0.startIndex == 133 && $0.endIndex == 272 && $0
.uri == "https://www.example.com/citation-3" && $0.license == "mit"
$0.startIndex == 130 && $0.endIndex == 265 && !$0.uri.isEmpty && $0.license == nil
}))
XCTAssertTrue(citations
.contains(where: {
$0.startIndex == 272 && $0.endIndex == 431 && !$0.uri.isEmpty && $0.license == "mit"
}))
}

Expand Down

0 comments on commit 0a85432

Please sign in to comment.