Merge pull request #10 from maiadegraaf/vendor_update
Update Vendor to v1.0.0
maiadegraaf authored Jun 26, 2024
Commit 2205284 (2 parents: 367ffbf + d1203df)
Showing 917 changed files with 58,134 additions and 31,119 deletions.
4 changes: 2 additions & 2 deletions CMakeLists.txt

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion include/duckdb_odbc.hpp
@@ -42,7 +42,7 @@ struct OdbcHandleEnv : public OdbcHandle {
OdbcHandleEnv() : OdbcHandle(OdbcHandleType::ENV) {
duckdb::DBConfig ODBC_CONFIG;
ODBC_CONFIG.SetOptionByName("duckdb_api", "odbc");
db = make_shared<DuckDB>(nullptr, &ODBC_CONFIG);
db = make_shared_ptr<DuckDB>(nullptr, &ODBC_CONFIG);
};

shared_ptr<DuckDB> db;
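
Note on the change above: the handle now uses DuckDB's own make_shared_ptr helper instead of calling make_shared directly. As a rough sketch only (not DuckDB's actual implementation), such a wrapper can simply forward its arguments to std::make_shared, giving the project one place to hook allocation tracking or enforce its smart-pointer conventions:

// Hypothetical forwarding wrapper in the spirit of duckdb::make_shared_ptr;
// the real helper in DuckDB v1.0.0 may differ.
#include <memory>
#include <utility>

template <class T, class... ARGS>
std::shared_ptr<T> make_shared_ptr(ARGS &&...args) {
	return std::make_shared<T>(std::forward<ARGS>(args)...);
}
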
59 changes: 44 additions & 15 deletions src/duckdb/extension/icu/icu_extension.cpp
@@ -62,8 +62,35 @@ struct IcuBindData : public FunctionData {
auto &other = other_p.Cast<IcuBindData>();
return language == other.language && country == other.country;
}

static void Serialize(Serializer &serializer, const optional_ptr<FunctionData> bind_data_p,
const ScalarFunction &function) {
auto &bind_data = bind_data_p->Cast<IcuBindData>();
serializer.WriteProperty(100, "language", bind_data.language);
serializer.WriteProperty(101, "country", bind_data.country);
}

static unique_ptr<FunctionData> Deserialize(Deserializer &deserializer, ScalarFunction &function) {
string language;
string country;
deserializer.ReadProperty(100, "language", language);
deserializer.ReadProperty(101, "country", country);

return make_uniq<IcuBindData>(language, country);
}

static const string FUNCTION_PREFIX;

static string EncodeFunctionName(const string &collation) {
return FUNCTION_PREFIX + collation;
}
static string DecodeFunctionName(const string &fname) {
return fname.substr(FUNCTION_PREFIX.size());
}
};

const string IcuBindData::FUNCTION_PREFIX = "icu_collate_";

static int32_t ICUGetSortKey(icu::Collator &collator, string_t input, duckdb::unique_ptr<char[]> &buffer,
int32_t &buffer_size) {
icu::UnicodeString unicode_string =
@@ -108,7 +135,9 @@ static void ICUCollateFunction(DataChunk &args, ExpressionState &state, Vector &

static duckdb::unique_ptr<FunctionData> ICUCollateBind(ClientContext &context, ScalarFunction &bound_function,
vector<duckdb::unique_ptr<Expression>> &arguments) {
auto splits = StringUtil::Split(bound_function.name, "_");

const auto collation = IcuBindData::DecodeFunctionName(bound_function.name);
auto splits = StringUtil::Split(collation, "_");
if (splits.size() == 1) {
return make_uniq<IcuBindData>(splits[0], "");
} else if (splits.size() == 2) {
@@ -137,19 +166,11 @@ static duckdb::unique_ptr<FunctionData> ICUSortKeyBind(ClientContext &context, S
}
}

static void ICUCollateSerialize(Serializer &serializer, const optional_ptr<FunctionData> bind_data,
const ScalarFunction &function) {
throw NotImplementedException("FIXME: serialize icu-collate");
}

static duckdb::unique_ptr<FunctionData> ICUCollateDeserialize(Deserializer &deserializer, ScalarFunction &function) {
throw NotImplementedException("FIXME: serialize icu-collate");
}

static ScalarFunction GetICUFunction(const string &collation) {
ScalarFunction result(collation, {LogicalType::VARCHAR}, LogicalType::VARCHAR, ICUCollateFunction, ICUCollateBind);
result.serialize = ICUCollateSerialize;
result.deserialize = ICUCollateDeserialize;
static ScalarFunction GetICUCollateFunction(const string &collation) {
string fname = IcuBindData::EncodeFunctionName(collation);
ScalarFunction result(fname, {LogicalType::VARCHAR}, LogicalType::VARCHAR, ICUCollateFunction, ICUCollateBind);
result.serialize = IcuBindData::Serialize;
result.deserialize = IcuBindData::Deserialize;
return result;
}

@@ -238,7 +259,7 @@ static void LoadInternal(DuckDB &ddb) {
}
collation = StringUtil::Lower(collation);

CreateCollationInfo info(collation, GetICUFunction(collation), false, false);
CreateCollationInfo info(collation, GetICUCollateFunction(collation), false, false);
ExtensionUtil::RegisterCollation(db, info);
}
ScalarFunction sort_key("icu_sort_key", {LogicalType::VARCHAR, LogicalType::VARCHAR}, LogicalType::VARCHAR,
@@ -283,6 +304,14 @@ std::string IcuExtension::Name() {
return "icu";
}

std::string IcuExtension::Version() const {
#ifdef EXT_VERSION_ICU
return EXT_VERSION_ICU;
#else
return "";
#endif
}

} // namespace duckdb

extern "C" {
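
The heart of the icu_extension.cpp change is that collation functions are now registered under a prefixed internal name and their bind data (language and country) is serialized properly instead of throwing NotImplementedException. A minimal standalone sketch of the naming round trip, using the "icu_collate_" prefix from the diff; the "de_DE" collation string is only an example value:

// Round trip of the function-name scheme introduced above (standalone sketch).
#include <cassert>
#include <string>

static const std::string FUNCTION_PREFIX = "icu_collate_";

static std::string EncodeFunctionName(const std::string &collation) {
	return FUNCTION_PREFIX + collation;
}

static std::string DecodeFunctionName(const std::string &fname) {
	return fname.substr(FUNCTION_PREFIX.size());
}

int main() {
	// A collation such as "de_DE" is registered as "icu_collate_de_DE" ...
	std::string fname = EncodeFunctionName("de_DE");
	assert(fname == "icu_collate_de_DE");
	// ... and the bind step recovers "de_DE" from the function name before
	// splitting it on "_" into a language and a country.
	assert(DecodeFunctionName(fname) == "de_DE");
	return 0;
}
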
1 change: 1 addition & 0 deletions src/duckdb/extension/icu/include/icu_extension.hpp
@@ -16,6 +16,7 @@ class IcuExtension : public Extension {
public:
void Load(DuckDB &db) override;
std::string Name() override;
std::string Version() const override;
};

} // namespace duckdb
10 changes: 5 additions & 5 deletions src/duckdb/extension/icu/third_party/icu/i18n/basictz.cpp
@@ -63,7 +63,7 @@ BasicTimeZone::hasEquivalentTransitions(const BasicTimeZone& tz, UDate start, UD
} else {
if (raw1 != raw2 || dst1 != dst2) {
return FALSE;
}
}
}
// Check transitions in the range
UDate time = start;
@@ -159,7 +159,7 @@ BasicTimeZone::getSimpleRulesNear(UDate date, InitialTimeZoneRule*& initial,
if (((tr.getFrom()->getDSTSavings() == 0 && tr.getTo()->getDSTSavings() != 0)
|| (tr.getFrom()->getDSTSavings() != 0 && tr.getTo()->getDSTSavings() == 0))
&& (date + MILLIS_PER_YEAR > nextTransitionTime)) {

int32_t year, month, dom, dow, doy, mid;
UDate d;

@@ -375,13 +375,13 @@ BasicTimeZone::getTimeZoneRulesAfter(UDate start, InitialTimeZoneRule*& initial,
UDate updatedTime = tzt.getTime();
if (updatedTime == time) {
// Can get here if rules for start & end of daylight time have exactly
// the same time.
// the same time.
// TODO: fix getNextTransition() to prevent it?
status = U_INVALID_STATE_ERROR;
goto error;
}
time = updatedTime;

const TimeZoneRule *toRule = tzt.getTo();
for (i = 0; i < ruleCount; i++) {
r = (TimeZoneRule*)orgRules->elementAt(i);
@@ -408,7 +408,7 @@ BasicTimeZone::getTimeZoneRulesAfter(UDate start, InitialTimeZoneRule*& initial,
if (!avail) {
break;
}
if (*(tzt0.getTo()) == *tar) {
if (*tar == *(tzt0.getTo())) {
break;
}
t = tzt0.getTime();
7 changes: 6 additions & 1 deletion src/duckdb/extension/json/include/json_common.hpp
@@ -14,6 +14,8 @@
#include "duckdb/planner/expression/bound_function_expression.hpp"
#include "yyjson.hpp"

using namespace duckdb_yyjson; // NOLINT

namespace duckdb {

//! JSON allocator is a custom allocator for yyjson that prevents many tiny allocations
@@ -108,6 +110,7 @@ struct JSONCommon {
switch (yyjson_get_tag(val)) {
case YYJSON_TYPE_NULL | YYJSON_SUBTYPE_NONE:
return JSONCommon::TYPE_STRING_NULL;
case YYJSON_TYPE_STR | YYJSON_SUBTYPE_NOESC:
case YYJSON_TYPE_STR | YYJSON_SUBTYPE_NONE:
return JSONCommon::TYPE_STRING_VARCHAR;
case YYJSON_TYPE_ARR | YYJSON_SUBTYPE_NONE:
@@ -136,6 +139,7 @@ struct JSONCommon {
switch (yyjson_get_tag(val)) {
case YYJSON_TYPE_NULL | YYJSON_SUBTYPE_NONE:
return LogicalTypeId::SQLNULL;
case YYJSON_TYPE_STR | YYJSON_SUBTYPE_NOESC:
case YYJSON_TYPE_STR | YYJSON_SUBTYPE_NONE:
return LogicalTypeId::VARCHAR;
case YYJSON_TYPE_ARR | YYJSON_SUBTYPE_NONE:
@@ -286,7 +290,8 @@ struct JSONCommon {
private:
//! Get JSON pointer (/field/index/... syntax)
static inline yyjson_val *GetPointer(yyjson_val *val, const char *ptr, const idx_t &len) {
return len == 1 ? val : unsafe_yyjson_get_pointer(val, ptr, len);
yyjson_ptr_err err;
return len == 1 ? val : unsafe_yyjson_ptr_getx(val, ptr, len, &err);
}
//! Get JSON path ($.field[index]... syntax)
static yyjson_val *GetPath(yyjson_val *val, const char *ptr, const idx_t &len);
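
For context on the GetPointer change above: the newer yyjson JSON-pointer API takes an explicit length and an error out-parameter rather than failing silently. A standalone sketch of the same call pattern against plain yyjson (outside DuckDB's duckdb_yyjson namespace); the sample document and pointer are illustrative, and the sketch assumes a yyjson release that ships yyjson_ptr_getx and yyjson_ptr_err:

// Resolving a JSON pointer with the length + error-struct API (sketch only).
#include "yyjson.h"
#include <cstdio>
#include <cstring>

int main() {
	const char *json = "{\"a\":[1,2,3]}";
	yyjson_doc *doc = yyjson_read(json, std::strlen(json), 0);
	yyjson_val *root = yyjson_doc_get_root(doc);

	const char *ptr = "/a/1";
	yyjson_ptr_err err;
	yyjson_val *val = yyjson_ptr_getx(root, ptr, std::strlen(ptr), &err);
	if (val) {
		std::printf("/a/1 -> %lld\n", (long long)yyjson_get_sint(val)); // prints 2
	}
	yyjson_doc_free(doc);
	return 0;
}
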
10 changes: 5 additions & 5 deletions src/duckdb/extension/json/include/json_executors.hpp
@@ -30,7 +30,7 @@ struct JSONExecutors {
}

//! Two-argument JSON read function (with path query), i.e. json_type('[1, 2, 3]', '$[0]')
template <class T>
template <class T, bool NULL_IF_NULL = true>
static void BinaryExecute(DataChunk &args, ExpressionState &state, Vector &result,
std::function<T(yyjson_val *, yyjson_alc *, Vector &)> fun) {
auto &func_expr = state.expr.Cast<BoundFunctionExpression>();
@@ -48,7 +48,7 @@
auto doc =
JSONCommon::ReadDocument(input, JSONCommon::READ_FLAG, lstate.json_allocator.GetYYAlc());
auto val = JSONCommon::GetUnsafe(doc->root, ptr, len);
if (!val || unsafe_yyjson_is_null(val)) {
if (!val || (NULL_IF_NULL && unsafe_yyjson_is_null(val))) {
mask.SetInvalid(idx);
return T {};
} else {
@@ -76,7 +76,7 @@
for (idx_t i = 0; i < vals.size(); i++) {
auto &val = vals[i];
D_ASSERT(val != nullptr); // Wildcard extract shouldn't give back nullptrs
if (unsafe_yyjson_is_null(val)) {
if (NULL_IF_NULL && unsafe_yyjson_is_null(val)) {
child_validity.SetInvalid(current_size + i);
} else {
child_vals[current_size + i] = fun(val, alc, result);
@@ -109,7 +109,7 @@
}

//! JSON read function with list of path queries, i.e. json_type('[1, 2, 3]', ['$[0]', '$[1]'])
template <class T>
template <class T, bool NULL_IF_NULL = true>
static void ExecuteMany(DataChunk &args, ExpressionState &state, Vector &result,
std::function<T(yyjson_val *, yyjson_alc *, Vector &)> fun) {
auto &func_expr = state.expr.Cast<BoundFunctionExpression>();
@@ -148,7 +148,7 @@
for (idx_t path_i = 0; path_i < num_paths; path_i++) {
auto child_idx = offset + path_i;
val = JSONCommon::GetUnsafe(doc->root, info.ptrs[path_i], info.lens[path_i]);
if (!val || unsafe_yyjson_is_null(val)) {
if (!val || (NULL_IF_NULL && unsafe_yyjson_is_null(val))) {
child_validity.SetInvalid(child_idx);
} else {
child_data[child_idx] = fun(val, alc, child);
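
The NULL_IF_NULL template parameter added above (defaulting to true, which preserves the old behaviour) lets each JSON function decide whether a JSON null found at the queried path becomes a SQL NULL or is handed to the extraction callback. The check reduces to the following; the helper name and parameters are illustrative, not DuckDB code:

// Illustrative reduction of the NULL_IF_NULL condition used in the executors above.
template <bool NULL_IF_NULL>
static bool YieldsSqlNull(bool path_found, bool value_is_json_null) {
	// A missing path is always SQL NULL; a JSON null only becomes SQL NULL
	// when NULL_IF_NULL is enabled (the default).
	return !path_found || (NULL_IF_NULL && value_is_json_null);
}
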
1 change: 1 addition & 0 deletions src/duckdb/extension/json/include/json_extension.hpp
@@ -16,6 +16,7 @@ class JsonExtension : public Extension {
public:
void Load(DuckDB &db) override;
std::string Name() override;
std::string Version() const override;
};

} // namespace duckdb
4 changes: 2 additions & 2 deletions src/duckdb/extension/json/include/json_functions.hpp
@@ -70,8 +70,8 @@ class JSONFunctions {
static vector<ScalarFunctionSet> GetScalarFunctions();
static vector<PragmaFunctionSet> GetPragmaFunctions();
static vector<TableFunctionSet> GetTableFunctions();
static unique_ptr<TableRef> ReadJSONReplacement(ClientContext &context, const string &table_name,
ReplacementScanData *data);
static unique_ptr<TableRef> ReadJSONReplacement(ClientContext &context, ReplacementScanInput &input,
optional_ptr<ReplacementScanData> data);
static TableFunction GetReadJSONTableFunction(shared_ptr<JSONScanInfo> function_info);
static CopyFunction GetJSONCopyFunction();
static void RegisterSimpleCastFunctions(CastFunctionSet &casts);
4 changes: 2 additions & 2 deletions src/duckdb/extension/json/include/json_serializer.hpp
@@ -27,8 +27,8 @@ struct JsonSerializer : Serializer {
public:
explicit JsonSerializer(yyjson_mut_doc *doc, bool skip_if_null, bool skip_if_empty, bool skip_if_default)
: doc(doc), stack({yyjson_mut_obj(doc)}), skip_if_null(skip_if_null), skip_if_empty(skip_if_empty) {
serialize_enum_as_string = true;
serialize_default_values = !skip_if_default;
options.serialize_enum_as_string = true;
options.serialize_default_values = !skip_if_default;
}

template <class T>
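
The constructor change reflects that serialize_enum_as_string and serialize_default_values now live on a grouped options member of the base Serializer instead of being loose fields. A minimal sketch of that grouping; only the two flag names come from the diff, while the struct name and defaults are assumptions:

// Sketch of the grouped serializer options the constructor above assigns to.
struct SerializationOptions {
	bool serialize_enum_as_string = false;
	bool serialize_default_values = false;
};

class Serializer {
public:
	SerializationOptions options; // derived serializers such as JsonSerializer set these flags
};
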