path: root/src/common
author      Tom Lane    2024-05-14 20:34:50 +0000
committer   Tom Lane    2024-05-14 20:34:50 +0000
commit      da256a4a7fdcca35fe7ca808686ad3de6ee22306 (patch)
tree        863d6a21cd148b40add73ae45bdedcf0acc001ee /src/common
parent      3ddbac368c205fce1f293de1fe60c1b479800746 (diff)
Pre-beta mechanical code beautification.
Run pgindent, pgperltidy, and reformat-dat-files.

The pgindent part of this is pretty small, consisting mainly of fixing up self-inflicted formatting damage from patches that hadn't bothered to add their new typedefs to typedefs.list. In order to keep it from making anything worse, I manually added a dozen or so typedefs that appeared in the existing typedefs.list but not in the buildfarm's list. Perhaps we should formalize that, or better find a way to get those typedefs into the automatic list.

pgperltidy is as opinionated as always, and reformat-dat-files too.
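For readers unfamiliar with why typedefs.list matters here: pgindent only knows an identifier is a type if it appears in that list, and an unrecognized type name changes how pointer declarations are spaced. The following is a minimal illustration using a hypothetical type name (nothing this commit touches); the unicode_category.c hunk further down shows the real case for pg_unicode_range.

#include <stdbool.h>
#include <stddef.h>

/* Hypothetical typedef, used only to illustrate the formatting rule. */
typedef struct
{
	unsigned int first;
	unsigned int last;
} my_range;

/*
 * With "my_range" present in typedefs.list, pgindent keeps the asterisk
 * attached to the parameter name:
 */
bool		lookup_known(const my_range *tbl, size_t size, unsigned int code);

/*
 * With "my_range" missing from the list, pgindent does not recognize it as a
 * type and inserts a space after the asterisk, which is exactly the kind of
 * damage being undone for pg_unicode_range below:
 */
bool		lookup_unknown(const my_range * tbl, size_t size, unsigned int code);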
Diffstat (limited to 'src/common')
-rw-r--r--  src/common/jsonapi.c           | 42
-rw-r--r--  src/common/unicode_case.c      | 10
-rw-r--r--  src/common/unicode_category.c  |  4
3 files changed, 28 insertions, 28 deletions
diff --git a/src/common/jsonapi.c b/src/common/jsonapi.c
index 3d3b76822b5..a8764302e4d 100644
--- a/src/common/jsonapi.c
+++ b/src/common/jsonapi.c
@@ -173,7 +173,7 @@ typedef struct
{
size_t len;
char *prod;
-} td_entry;
+} td_entry;
#define TD_ENTRY(PROD) { sizeof(PROD) - 1, (PROD) }
@@ -181,30 +181,30 @@ static td_entry td_parser_table[JSON_NUM_NONTERMINALS][JSON_NUM_TERMINALS] =
{
/* JSON */
[OFS(JSON_NT_JSON)][JSON_TOKEN_STRING] = TD_ENTRY(JSON_PROD_SCALAR_STRING),
- [OFS(JSON_NT_JSON)][JSON_TOKEN_NUMBER] = TD_ENTRY(JSON_PROD_SCALAR_NUMBER),
- [OFS(JSON_NT_JSON)][JSON_TOKEN_TRUE] = TD_ENTRY(JSON_PROD_SCALAR_TRUE),
- [OFS(JSON_NT_JSON)][JSON_TOKEN_FALSE] = TD_ENTRY(JSON_PROD_SCALAR_FALSE),
- [OFS(JSON_NT_JSON)][JSON_TOKEN_NULL] = TD_ENTRY(JSON_PROD_SCALAR_NULL),
- [OFS(JSON_NT_JSON)][JSON_TOKEN_ARRAY_START] = TD_ENTRY(JSON_PROD_ARRAY),
- [OFS(JSON_NT_JSON)][JSON_TOKEN_OBJECT_START] = TD_ENTRY(JSON_PROD_OBJECT),
+ [OFS(JSON_NT_JSON)][JSON_TOKEN_NUMBER] = TD_ENTRY(JSON_PROD_SCALAR_NUMBER),
+ [OFS(JSON_NT_JSON)][JSON_TOKEN_TRUE] = TD_ENTRY(JSON_PROD_SCALAR_TRUE),
+ [OFS(JSON_NT_JSON)][JSON_TOKEN_FALSE] = TD_ENTRY(JSON_PROD_SCALAR_FALSE),
+ [OFS(JSON_NT_JSON)][JSON_TOKEN_NULL] = TD_ENTRY(JSON_PROD_SCALAR_NULL),
+ [OFS(JSON_NT_JSON)][JSON_TOKEN_ARRAY_START] = TD_ENTRY(JSON_PROD_ARRAY),
+ [OFS(JSON_NT_JSON)][JSON_TOKEN_OBJECT_START] = TD_ENTRY(JSON_PROD_OBJECT),
/* ARRAY_ELEMENTS */
- [OFS(JSON_NT_ARRAY_ELEMENTS)][JSON_TOKEN_ARRAY_START] = TD_ENTRY(JSON_PROD_ARRAY_ELEMENTS),
- [OFS(JSON_NT_ARRAY_ELEMENTS)][JSON_TOKEN_OBJECT_START] = TD_ENTRY(JSON_PROD_ARRAY_ELEMENTS),
- [OFS(JSON_NT_ARRAY_ELEMENTS)][JSON_TOKEN_STRING] = TD_ENTRY(JSON_PROD_ARRAY_ELEMENTS),
- [OFS(JSON_NT_ARRAY_ELEMENTS)][JSON_TOKEN_NUMBER] = TD_ENTRY(JSON_PROD_ARRAY_ELEMENTS),
- [OFS(JSON_NT_ARRAY_ELEMENTS)][JSON_TOKEN_TRUE] = TD_ENTRY(JSON_PROD_ARRAY_ELEMENTS),
- [OFS(JSON_NT_ARRAY_ELEMENTS)][JSON_TOKEN_FALSE] = TD_ENTRY(JSON_PROD_ARRAY_ELEMENTS),
- [OFS(JSON_NT_ARRAY_ELEMENTS)][JSON_TOKEN_NULL] = TD_ENTRY(JSON_PROD_ARRAY_ELEMENTS),
- [OFS(JSON_NT_ARRAY_ELEMENTS)][JSON_TOKEN_ARRAY_END] = TD_ENTRY(JSON_PROD_EPSILON),
+ [OFS(JSON_NT_ARRAY_ELEMENTS)][JSON_TOKEN_ARRAY_START] = TD_ENTRY(JSON_PROD_ARRAY_ELEMENTS),
+ [OFS(JSON_NT_ARRAY_ELEMENTS)][JSON_TOKEN_OBJECT_START] = TD_ENTRY(JSON_PROD_ARRAY_ELEMENTS),
+ [OFS(JSON_NT_ARRAY_ELEMENTS)][JSON_TOKEN_STRING] = TD_ENTRY(JSON_PROD_ARRAY_ELEMENTS),
+ [OFS(JSON_NT_ARRAY_ELEMENTS)][JSON_TOKEN_NUMBER] = TD_ENTRY(JSON_PROD_ARRAY_ELEMENTS),
+ [OFS(JSON_NT_ARRAY_ELEMENTS)][JSON_TOKEN_TRUE] = TD_ENTRY(JSON_PROD_ARRAY_ELEMENTS),
+ [OFS(JSON_NT_ARRAY_ELEMENTS)][JSON_TOKEN_FALSE] = TD_ENTRY(JSON_PROD_ARRAY_ELEMENTS),
+ [OFS(JSON_NT_ARRAY_ELEMENTS)][JSON_TOKEN_NULL] = TD_ENTRY(JSON_PROD_ARRAY_ELEMENTS),
+ [OFS(JSON_NT_ARRAY_ELEMENTS)][JSON_TOKEN_ARRAY_END] = TD_ENTRY(JSON_PROD_EPSILON),
/* MORE_ARRAY_ELEMENTS */
- [OFS(JSON_NT_MORE_ARRAY_ELEMENTS)][JSON_TOKEN_COMMA] = TD_ENTRY(JSON_PROD_MORE_ARRAY_ELEMENTS),
- [OFS(JSON_NT_MORE_ARRAY_ELEMENTS)][JSON_TOKEN_ARRAY_END] = TD_ENTRY(JSON_PROD_EPSILON),
+ [OFS(JSON_NT_MORE_ARRAY_ELEMENTS)][JSON_TOKEN_COMMA] = TD_ENTRY(JSON_PROD_MORE_ARRAY_ELEMENTS),
+ [OFS(JSON_NT_MORE_ARRAY_ELEMENTS)][JSON_TOKEN_ARRAY_END] = TD_ENTRY(JSON_PROD_EPSILON),
/* KEY_PAIRS */
- [OFS(JSON_NT_KEY_PAIRS)][JSON_TOKEN_STRING] = TD_ENTRY(JSON_PROD_KEY_PAIRS),
- [OFS(JSON_NT_KEY_PAIRS)][JSON_TOKEN_OBJECT_END] = TD_ENTRY(JSON_PROD_EPSILON),
+ [OFS(JSON_NT_KEY_PAIRS)][JSON_TOKEN_STRING] = TD_ENTRY(JSON_PROD_KEY_PAIRS),
+ [OFS(JSON_NT_KEY_PAIRS)][JSON_TOKEN_OBJECT_END] = TD_ENTRY(JSON_PROD_EPSILON),
/* MORE_KEY_PAIRS */
- [OFS(JSON_NT_MORE_KEY_PAIRS)][JSON_TOKEN_COMMA] = TD_ENTRY(JSON_PROD_MORE_KEY_PAIRS),
- [OFS(JSON_NT_MORE_KEY_PAIRS)][JSON_TOKEN_OBJECT_END] = TD_ENTRY(JSON_PROD_EPSILON),
+ [OFS(JSON_NT_MORE_KEY_PAIRS)][JSON_TOKEN_COMMA] = TD_ENTRY(JSON_PROD_MORE_KEY_PAIRS),
+ [OFS(JSON_NT_MORE_KEY_PAIRS)][JSON_TOKEN_OBJECT_END] = TD_ENTRY(JSON_PROD_EPSILON),
};
/* the GOAL production. Not stored in the table, but will be the initial contents of the prediction stack */
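The array being realigned above is the prediction table of jsonapi.c's table-driven parser: indexed by (nonterminal, lookahead token), each entry holds a production string whose symbols get pushed onto the prediction stack. Below is a generic, self-contained sketch of that lookup pattern, not the actual jsonapi.c control flow; every identifier in it is illustrative.

#include <stddef.h>
#include <stdio.h>

/* Illustrative stand-in for td_entry: a production string and its length. */
typedef struct
{
	size_t		len;
	const char *prod;
} demo_entry;

#define DEMO_ENTRY(PROD) { sizeof(PROD) - 1, (PROD) }

enum demo_nonterminal { NT_VALUE, DEMO_NUM_NONTERMINALS };
enum demo_token { TOK_STRING, TOK_NUMBER, TOK_COMMA, DEMO_NUM_TERMINALS };

/* Prediction table: an empty entry means "syntax error for this lookahead". */
static const demo_entry demo_table[DEMO_NUM_NONTERMINALS][DEMO_NUM_TERMINALS] = {
	[NT_VALUE][TOK_STRING] = DEMO_ENTRY("s"),
	[NT_VALUE][TOK_NUMBER] = DEMO_ENTRY("n"),
};

int
main(void)
{
	/* Expand nonterminal NT_VALUE on lookahead TOK_NUMBER. */
	const demo_entry *e = &demo_table[NT_VALUE][TOK_NUMBER];

	if (e->prod != NULL)
		printf("push %zu symbol(s) from production \"%s\"\n", e->len, e->prod);
	else
		printf("syntax error: no production for this lookahead\n");
	return 0;
}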
diff --git a/src/common/unicode_case.c b/src/common/unicode_case.c
index bc423b0890c..89992799930 100644
--- a/src/common/unicode_case.c
+++ b/src/common/unicode_case.c
@@ -28,7 +28,7 @@ static size_t convert_case(char *dst, size_t dstsize, const char *src, ssize_t s
pg_wchar
unicode_lowercase_simple(pg_wchar code)
{
- const pg_case_map *map = find_case_map(code);
+ const pg_case_map *map = find_case_map(code);
return map ? map->simplemap[CaseLower] : code;
}
@@ -36,7 +36,7 @@ unicode_lowercase_simple(pg_wchar code)
pg_wchar
unicode_titlecase_simple(pg_wchar code)
{
- const pg_case_map *map = find_case_map(code);
+ const pg_case_map *map = find_case_map(code);
return map ? map->simplemap[CaseTitle] : code;
}
@@ -44,7 +44,7 @@ unicode_titlecase_simple(pg_wchar code)
pg_wchar
unicode_uppercase_simple(pg_wchar code)
{
- const pg_case_map *map = find_case_map(code);
+ const pg_case_map *map = find_case_map(code);
return map ? map->simplemap[CaseUpper] : code;
}
@@ -156,7 +156,7 @@ convert_case(char *dst, size_t dstsize, const char *src, ssize_t srclen,
{
pg_wchar u1 = utf8_to_unicode((unsigned char *) src + srcoff);
int u1len = unicode_utf8len(u1);
- const pg_case_map *casemap = find_case_map(u1);
+ const pg_case_map *casemap = find_case_map(u1);
if (str_casekind == CaseTitle)
{
@@ -210,7 +210,7 @@ find_case_map(pg_wchar ucs)
Assert(lengthof(case_map) >= 0x80);
if (ucs < 0x80)
{
- const pg_case_map *map = &case_map[ucs];
+ const pg_case_map *map = &case_map[ucs];
Assert(map->codepoint == ucs);
return map;
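The unicode_case.c hunks above all share one pattern: look up a per-code-point case map and fall back to the input code point when no mapping exists (map ? map->simplemap[CaseLower] : code). Here is a self-contained sketch of that pattern with a toy table in place of the generated case_map array; all names are illustrative.

#include <stddef.h>
#include <stdio.h>

/* Toy stand-ins for the generated Unicode tables. */
typedef unsigned int codepoint_t;

typedef enum
{
	CASE_LOWER,
	CASE_TITLE,
	CASE_UPPER,
	NUM_CASE_KINDS
} case_kind;

typedef struct
{
	codepoint_t codepoint;
	codepoint_t simplemap[NUM_CASE_KINDS];
} case_map_entry;

static const case_map_entry toy_map[] = {
	{'a', {'a', 'A', 'A'}},
	{'A', {'a', 'A', 'A'}},
};

/* Return the mapping entry for a code point, or NULL when there is none. */
static const case_map_entry *
find_entry(codepoint_t cp)
{
	for (size_t i = 0; i < sizeof(toy_map) / sizeof(toy_map[0]); i++)
	{
		if (toy_map[i].codepoint == cp)
			return &toy_map[i];
	}
	return NULL;
}

/* Same shape as the hunks above: use the mapping if one exists, else identity. */
static codepoint_t
uppercase_simple(codepoint_t cp)
{
	const case_map_entry *e = find_entry(cp);

	return e ? e->simplemap[CASE_UPPER] : cp;
}

int
main(void)
{
	printf("%c %c\n", (int) uppercase_simple('a'), (int) uppercase_simple('!'));
	return 0;
}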
diff --git a/src/common/unicode_category.c b/src/common/unicode_category.c
index 359e82ec316..a615a905678 100644
--- a/src/common/unicode_category.c
+++ b/src/common/unicode_category.c
@@ -75,7 +75,7 @@
#define PG_U_CHARACTER_TAB 0x09
-static bool range_search(const pg_unicode_range * tbl, size_t size,
+static bool range_search(const pg_unicode_range *tbl, size_t size,
pg_wchar code);
/*
@@ -478,7 +478,7 @@ unicode_category_abbrev(pg_unicode_category category)
* given table.
*/
static bool
-range_search(const pg_unicode_range * tbl, size_t size, pg_wchar code)
+range_search(const pg_unicode_range *tbl, size_t size, pg_wchar code)
{
int min = 0;
int mid;
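Only the signature and the first locals (int min = 0; int mid;) of range_search() are visible in this hunk; the surrounding comment describes it as a search over a sorted table of code point ranges. Below is a minimal sketch of that kind of binary range search, assuming a hypothetical range type with first/last bounds; the real pg_unicode_range layout is not shown in this diff.

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* Hypothetical range type for illustration only. */
typedef struct
{
	unsigned int first;
	unsigned int last;
} cp_range;

/* Binary search: true if code falls inside any range in the sorted table. */
static bool
range_contains(const cp_range *tbl, size_t size, unsigned int code)
{
	size_t		min = 0;
	size_t		max = size;

	while (min < max)
	{
		size_t		mid = min + (max - min) / 2;

		if (code < tbl[mid].first)
			max = mid;
		else if (code > tbl[mid].last)
			min = mid + 1;
		else
			return true;
	}
	return false;
}

int
main(void)
{
	static const cp_range hexdigits[] = {{'0', '9'}, {'A', 'F'}};

	printf("%d %d\n",
		   range_contains(hexdigits, 2, '5'),
		   range_contains(hexdigits, 2, 'z'));
	return 0;
}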