package com.google.firebase.firestore;

import com.google.firebase.Timestamp;
import com.google.firebase.firestore.FieldValue;
import com.google.firebase.firestore.core.UserData;
import com.google.firebase.firestore.model.DatabaseId;
import com.google.firebase.firestore.model.ObjectValue;
import com.google.firebase.firestore.model.mutation.ArrayTransformOperation;
import com.google.firebase.firestore.model.mutation.FieldMask;
import com.google.firebase.firestore.model.mutation.NumericIncrementTransformOperation;
import com.google.firebase.firestore.model.mutation.ServerTimestampOperation;
import com.google.firebase.firestore.util.Assert;
import com.google.firebase.firestore.util.CustomClassMapper;
import com.google.firebase.firestore.util.Preconditions;
import com.google.firebase.firestore.util.Util;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
// The o.* imports below are R8/ProGuard-obfuscated classes. Judging from the call sites in this
// file, they most likely correspond to the following Firestore/protobuf wire types (an inference
// from the decompiled code, not a verified symbol table):
//   sbb           -> com.google.firestore.v1.Value
//   ldG           -> com.google.firestore.v1.MapValue
//   uOO           -> com.google.firestore.v1.ArrayValue
//   EnumC15686hkD -> com.google.protobuf.NullValue
//   jrE           -> com.google.type.LatLng
//   AQX           -> com.google.protobuf.Timestamp
import o.AQX;
import o.EnumC15686hkD;
import o.jrE;
import o.ldG;
import o.sbb;
import o.uOO;

/* loaded from: classes2.dex */
public final class UserDataReader {

    private final DatabaseId databaseId;

    public UserDataReader(DatabaseId databaseId) {
        this.databaseId = databaseId;
    }

    /** Parses the input of a set() call (without merge) into document data. */
    public final UserData.ParsedSetData parseSetData(Object obj) {
        UserData.ParseAccumulator parseAccumulator = new UserData.ParseAccumulator(UserData.Source.Set);
        return parseAccumulator.toSetData(convertAndParseDocumentData(obj, parseAccumulator.rootContext()));
    }

    /**
     * Parses the input of a set() call with SetOptions.merge(). If an explicit field mask is
     * provided, every field in the mask must also be present in the input data.
     */
    public final UserData.ParsedSetData parseMergeData(Object obj, FieldMask fieldMask) {
        UserData.ParseAccumulator parseAccumulator = new UserData.ParseAccumulator(UserData.Source.MergeSet);
        ObjectValue convertAndParseDocumentData = convertAndParseDocumentData(obj, parseAccumulator.rootContext());
        if (fieldMask != null) {
            for (com.google.firebase.firestore.model.FieldPath fieldPath : fieldMask.getMask()) {
                if (!parseAccumulator.contains(fieldPath)) {
                    StringBuilder sb = new StringBuilder("Field '");
                    sb.append(fieldPath.toString());
                    sb.append("' is specified in your field mask but not in your input data.");
                    throw new IllegalArgumentException(sb.toString());
                }
            }
            return parseAccumulator.toMergeData(convertAndParseDocumentData, fieldMask);
        }
        return parseAccumulator.toMergeData(convertAndParseDocumentData);
    }

    /** Parses update() data given as a map of dot-separated field paths to values. */
    public final UserData.ParsedUpdateData parseUpdateData(Map<String, Object> map) {
        Preconditions.checkNotNull(map, "Provided update data must not be null.");
        UserData.ParseAccumulator parseAccumulator = new UserData.ParseAccumulator(UserData.Source.Update);
        UserData.ParseContext rootContext = parseAccumulator.rootContext();
        ObjectValue objectValue = new ObjectValue();
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            com.google.firebase.firestore.model.FieldPath internalPath =
                    FieldPath.fromDotSeparatedPath(entry.getKey()).getInternalPath();
            Object value = entry.getValue();
            if (value instanceof FieldValue.DeleteFieldValue) {
                // FieldValue.delete() contributes only a field-mask entry, no literal value.
                rootContext.addToFieldMask(internalPath);
            } else {
                sbb convertAndParseFieldData = convertAndParseFieldData(value, rootContext.childContext(internalPath));
                if (convertAndParseFieldData != null) {
                    rootContext.addToFieldMask(internalPath);
                    objectValue.set(internalPath, convertAndParseFieldData);
                }
            }
        }
        return parseAccumulator.toUpdateData(objectValue);
    }

    /** Parses update() data given as alternating field (String or FieldPath) and value arguments. */
    public final UserData.ParsedUpdateData parseUpdateData(List<Object> list) {
        com.google.firebase.firestore.model.FieldPath internalPath;
        Assert.hardAssert(
                list.size() % 2 == 0,
                "Expected fieldAndValues to contain an even number of elements",
                new Object[0]);
        UserData.ParseAccumulator parseAccumulator = new UserData.ParseAccumulator(UserData.Source.Update);
        UserData.ParseContext rootContext = parseAccumulator.rootContext();
        ObjectValue objectValue = new ObjectValue();
        Iterator<Object> it = list.iterator();
        while (it.hasNext()) {
            Object next = it.next();
            Object next2 = it.next();
            boolean z = next instanceof String;
            Assert.hardAssert(
                    z || (next instanceof FieldPath),
                    "Expected argument to be String or FieldPath.",
                    new Object[0]);
            if (z) {
                internalPath = FieldPath.fromDotSeparatedPath((String) next).getInternalPath();
            } else {
                internalPath = ((FieldPath) next).getInternalPath();
            }
            if (next2 instanceof FieldValue.DeleteFieldValue) {
                rootContext.addToFieldMask(internalPath);
            } else {
                sbb convertAndParseFieldData = convertAndParseFieldData(next2, rootContext.childContext(internalPath));
                if (convertAndParseFieldData != null) {
                    rootContext.addToFieldMask(internalPath);
                    objectValue.set(internalPath, convertAndParseFieldData);
                }
            }
        }
        return parseAccumulator.toUpdateData(objectValue);
    }

    /** Parses a value used as a query argument; sentinel FieldValues are not allowed here. */
    public final sbb parseQueryValue(Object obj) {
        return parseQueryValue(obj, false);
    }

    public final sbb parseQueryValue(Object obj, boolean z) {
        UserData.ParseAccumulator parseAccumulator =
                new UserData.ParseAccumulator(z ? UserData.Source.ArrayArgument : UserData.Source.Argument);
        sbb convertAndParseFieldData = convertAndParseFieldData(obj, parseAccumulator.rootContext());
        Assert.hardAssert(convertAndParseFieldData != null, "Parsed data should not be null.", new Object[0]);
        Assert.hardAssert(
                parseAccumulator.getFieldTransforms().isEmpty(),
                "Field transforms should have been disallowed.",
                new Object[0]);
        return convertAndParseFieldData;
    }

    /** Converts a POJO or plain Java value and parses it in the given context. */
    public final sbb convertAndParseFieldData(Object obj, UserData.ParseContext parseContext) {
        return parseData(CustomClassMapper.convertToPlainJavaTypes(obj), parseContext);
    }

    private ObjectValue convertAndParseDocumentData(Object obj, UserData.ParseContext parseContext) {
        if (obj.getClass().isArray()) {
            throw new IllegalArgumentException(
                    "Invalid data. Data must be a Map or a suitable POJO object, but it was an array");
        }
        sbb parseData = parseData(CustomClassMapper.convertToPlainJavaTypes(obj), parseContext);
        if (sbb.HBt.c(parseData.valueTypeCase_) != sbb.HBt.MAP_VALUE) {
            StringBuilder sb = new StringBuilder(
                    "Invalid data. Data must be a Map or a suitable POJO object, but it was of type: ");
            sb.append(Util.typeName(obj));
            throw new IllegalArgumentException(sb.toString());
        }
        return new ObjectValue(parseData);
    }

    // Returns the parsed value, or null if the input was a sentinel FieldValue that is recorded
    // on the parse context (as a field-mask entry or a field transform) instead.
    private sbb parseData(Object obj, UserData.ParseContext parseContext) {
        if (obj instanceof Map) {
            return parseMap((Map) obj, parseContext);
        }
        if (obj instanceof FieldValue) {
            parseSentinelFieldValue((FieldValue) obj, parseContext);
            return null;
        }
        if (parseContext.getPath() != null) {
            parseContext.addToFieldMask(parseContext.getPath());
        }
        if (obj instanceof List) {
            if (parseContext.isArrayElement() && parseContext.getDataSource() != UserData.Source.ArrayArgument) {
                throw parseContext.createError("Nested arrays are not supported");
            }
            return parseList((List) obj, parseContext);
        }
        return parseScalarValue(obj, parseContext);
    }

    private sbb parseMap(Map<Object, Object> map, UserData.ParseContext parseContext) {
        if (map.isEmpty()) {
            if (parseContext.getPath() != null && !parseContext.getPath().isEmpty()) {
                parseContext.addToFieldMask(parseContext.getPath());
            }
            return sbb.d().c(ldG.d()).build();
        }
        ldG.RVV a = ldG.a();
        for (Map.Entry<Object, Object> entry : map.entrySet()) {
            if (!(entry.getKey() instanceof String)) {
                throw parseContext.createError(
                        String.format("Non-String Map key (%s) is not allowed", entry.getValue()));
            }
            String str = (String) entry.getKey();
            sbb parseData = parseData(entry.getValue(), parseContext.childContext(str));
            if (parseData != null) {
                a.b(str, parseData);
            }
        }
        return sbb.d().d(a).build();
    }

    private sbb parseList(List list, UserData.ParseContext parseContext) {
        uOO.IeS a = uOO.a();
        Iterator it = list.iterator();
        int i = 0;
        while (it.hasNext()) {
            sbb parseData = parseData(it.next(), parseContext.childContext(i));
            if (parseData == null) {
                // parseData() returns null for sentinel values; store an explicit null entry instead.
                parseData = sbb.d().b(EnumC15686hkD.NULL_VALUE).build();
            }
            a.a(parseData);
            i++;
        }
        return sbb.d().b(a).build();
    }

    // Sentinel FieldValues never produce a literal value; they are translated into field-mask
    // entries (delete) or field transforms (serverTimestamp, arrayUnion, arrayRemove, increment).
    private void parseSentinelFieldValue(FieldValue fieldValue, UserData.ParseContext parseContext) {
        if (!parseContext.isWrite()) {
            throw parseContext.createError(
                    String.format("%s() can only be used with set() and update()", fieldValue.getMethodName()));
        }
        if (parseContext.getPath() == null) {
            throw parseContext.createError(
                    String.format("%s() is not currently supported inside arrays", fieldValue.getMethodName()));
        }
        if (fieldValue instanceof FieldValue.DeleteFieldValue) {
            if (parseContext.getDataSource() == UserData.Source.MergeSet) {
                parseContext.addToFieldMask(parseContext.getPath());
                return;
            } else {
                if (parseContext.getDataSource() == UserData.Source.Update) {
                    Assert.hardAssert(
                            parseContext.getPath().length() > 0,
                            "FieldValue.delete() at the top level should have already been handled.",
                            new Object[0]);
                    throw parseContext.createError(
                            "FieldValue.delete() can only appear at the top level of your update data");
                }
                throw parseContext.createError(
                        "FieldValue.delete() can only be used with update() and set() with SetOptions.merge()");
            }
        }
        if (fieldValue instanceof FieldValue.ServerTimestampFieldValue) {
            parseContext.addToFieldTransforms(parseContext.getPath(), ServerTimestampOperation.getInstance());
            return;
        }
        if (fieldValue instanceof FieldValue.ArrayUnionFieldValue) {
            parseContext.addToFieldTransforms(
                    parseContext.getPath(),
                    new ArrayTransformOperation.Union(
                            parseArrayTransformElements(((FieldValue.ArrayUnionFieldValue) fieldValue).getElements())));
        } else if (fieldValue instanceof FieldValue.ArrayRemoveFieldValue) {
            parseContext.addToFieldTransforms(
                    parseContext.getPath(),
                    new ArrayTransformOperation.Remove(
                            parseArrayTransformElements(((FieldValue.ArrayRemoveFieldValue) fieldValue).getElements())));
        } else {
            if (fieldValue instanceof FieldValue.NumericIncrementFieldValue) {
                parseContext.addToFieldTransforms(
                        parseContext.getPath(),
                        new NumericIncrementTransformOperation(
                                parseQueryValue(((FieldValue.NumericIncrementFieldValue) fieldValue).getOperand())));
                return;
            }
            throw Assert.fail("Unknown FieldValue type: %s", Util.typeName(fieldValue));
        }
    }

    private sbb parseScalarValue(Object obj, UserData.ParseContext parseContext) {
        if (obj == null) {
            return sbb.d().b(EnumC15686hkD.NULL_VALUE).build();
        }
        if (obj instanceof Integer) {
            return sbb.d().e(((Integer) obj).intValue()).build();
        }
        if (obj instanceof Long) {
            return sbb.d().e(((Long) obj).longValue()).build();
        }
        if (obj instanceof Float) {
            return sbb.d().b(((Float) obj).doubleValue()).build();
        }
        if (obj instanceof Double) {
            return sbb.d().b(((Double) obj).doubleValue()).build();
        }
        if (obj instanceof Boolean) {
            return sbb.d().a(((Boolean) obj).booleanValue()).build();
        }
        if (obj instanceof String) {
            return sbb.d().e((String) obj).build();
        }
        if (obj instanceof Date) {
            return parseTimestamp(new Timestamp((Date) obj));
        }
        if (obj instanceof Timestamp) {
            return parseTimestamp((Timestamp) obj);
        }
        if (obj instanceof GeoPoint) {
            GeoPoint geoPoint = (GeoPoint) obj;
            return sbb.d().c(jrE.b().a(geoPoint.getLatitude()).c(geoPoint.getLongitude())).build();
        }
        if (obj instanceof Blob) {
            return sbb.d().e(((Blob) obj).toByteString()).build();
        }
        if (obj instanceof DocumentReference) {
            DocumentReference documentReference = (DocumentReference) obj;
            if (documentReference.getFirestore() != null) {
                DatabaseId databaseId = documentReference.getFirestore().getDatabaseId();
                if (!databaseId.equals(this.databaseId)) {
                    throw parseContext.createError(
                            String.format(
                                    "Document reference is for database %s/%s but should be for database %s/%s",
                                    databaseId.getProjectId(),
                                    databaseId.getDatabaseId(),
                                    this.databaseId.getProjectId(),
                                    this.databaseId.getDatabaseId()));
                }
            }
            return sbb.d()
                    .b(String.format(
                            "projects/%s/databases/%s/documents/%s",
                            this.databaseId.getProjectId(),
                            this.databaseId.getDatabaseId(),
                            documentReference.getPath()))
                    .build();
        }
        if (obj.getClass().isArray()) {
            throw parseContext.createError("Arrays are not supported; use a List instead");
        }
        StringBuilder sb = new StringBuilder("Unsupported type: ");
        sb.append(Util.typeName(obj));
        throw parseContext.createError(sb.toString());
    }

    private sbb parseTimestamp(Timestamp timestamp) {
        // Nanoseconds are truncated to microsecond precision before being sent to the backend.
        return sbb.d()
                .e(AQX.b().c(timestamp.getSeconds()).e((timestamp.getNanoseconds() / 1000) * 1000))
                .build();
    }

    private List parseArrayTransformElements(List list) {
        UserData.ParseAccumulator parseAccumulator = new UserData.ParseAccumulator(UserData.Source.Argument);
        ArrayList arrayList = new ArrayList(list.size());
        for (int i = 0; i < list.size(); i++) {
            arrayList.add(convertAndParseFieldData(list.get(i), parseAccumulator.rootContext().childContext(i)));
        }
        return arrayList;
    }
}
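
/*
 * Illustrative usage sketch, not part of the decompiled APK. It shows how UserDataReader might be
 * driven when a set() write is parsed: plain Java values become wire values, while sentinel
 * FieldValues such as serverTimestamp() are collected as field transforms. The
 * DatabaseId.forDatabase(...) factory and the "my-project"/"(default)" identifiers are assumptions
 * made for this example only.
 */
class UserDataReaderUsageSketch {
    static UserData.ParsedSetData exampleSetData() {
        // Assumed identifiers; a real caller obtains the DatabaseId from its FirebaseFirestore instance.
        UserDataReader reader = new UserDataReader(DatabaseId.forDatabase("my-project", "(default)"));

        Map<String, Object> doc = new java.util.HashMap<>();
        doc.put("name", "Ada");
        doc.put("visits", 1L);
        // serverTimestamp() is a sentinel: it is recorded as a field transform, not a literal value.
        doc.put("lastSeen", FieldValue.serverTimestamp());

        // parseSetData(...) converts the map to proto values and accumulates any field transforms.
        return reader.parseSetData(doc);
    }
}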