
[rhel-9.3] backport fixes #19740

Merged: 7 commits, Dec 12, 2023 (changes shown from all commits)
22 changes: 15 additions & 7 deletions pkg/apps/application-list.jsx
@@ -98,11 +98,19 @@ export const ApplicationList = ({ metainfo_db, appProgress, appProgressTitle, ac
comps.push(metainfo_db.components[id]);
comps.sort((a, b) => a.name.localeCompare(b.name));

function get_config(name, distro_id, def) {
function get_config(name, os_release, def) {
// ID is a single value, ID_LIKE is a list
const os_list = [os_release.ID || "", ...(os_release.ID_LIKE || "").split(/\s+/)];

if (cockpit.manifests.apps && cockpit.manifests.apps.config) {
let val = cockpit.manifests.apps.config[name];
if (typeof val === 'object' && val !== null && !Array.isArray(val))
val = val[distro_id];
const val = cockpit.manifests.apps.config[name];
if (typeof val === 'object' && val !== null && !Array.isArray(val)) {
for (const os of os_list) {
if (val[os])
return val[os];
}
return def;
}
return val !== undefined ? val : def;
} else {
return def;
@@ -112,16 +120,16 @@ export const ApplicationList = ({ metainfo_db, appProgress, appProgressTitle, ac
function refresh() {
read_os_release().then(os_release =>
PackageKit.refresh(metainfo_db.origin_files,
get_config('appstream_config_packages', os_release.ID, []),
get_config('appstream_data_packages', os_release.ID, []),
get_config('appstream_config_packages', os_release, []),
get_config('appstream_data_packages', os_release, []),
setProgress))
.finally(() => setProgress(false))
.catch(show_error);
}

let refresh_progress, refresh_button, tbody;
if (progress) {
refresh_progress = <ProgressBar size="sm" title={_("Checking for new applications")} data={progress} />;
refresh_progress = <ProgressBar id="refresh-progress" size="sm" title={_("Checking for new applications")} data={progress} />;
refresh_button = <CancelButton data={progress} />;
} else {
refresh_progress = null;
4 changes: 2 additions & 2 deletions pkg/apps/manifest.json
@@ -14,10 +14,10 @@

"config": {
"appstream_config_packages": {
"debian": ["appstream"], "ubuntu": ["appstream"]
"debian": ["appstream"]
},
"appstream_data_packages": {
"fedora": ["appstream-data"], "rhel": ["appstream-data"]
"fedora": ["appstream-data"]
}
}
}
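
Taken together with the application-list.jsx change above, the manifest now only needs entries for base distributions: a derivative is matched through the ID_LIKE chain in its os-release. A minimal Python sketch of that lookup (resolve_config and the os-release values are hypothetical, for illustration only):

    def resolve_config(value, os_release, default):
        # Mirror get_config(): ID is a single value, ID_LIKE a space-separated list
        if not isinstance(value, dict):
            return value if value is not None else default
        candidates = [os_release.get("ID", ""), *os_release.get("ID_LIKE", "").split()]
        for os_id in candidates:
            if value.get(os_id):
                return value[os_id]
        return default

    # A CentOS Stream-like system (ID=centos, ID_LIKE="rhel fedora") now picks up
    # appstream_data_packages via the "fedora" entry:
    resolve_config({"fedora": ["appstream-data"]}, {"ID": "centos", "ID_LIKE": "rhel fedora"}, [])
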
27 changes: 18 additions & 9 deletions pkg/lib/cockpit-po-plugin.js
@@ -44,10 +44,7 @@ function get_plural_expr(statement) {
return expr;
}

function buildFile(po_file, subdir, webpack_module, webpack_compilation) {
if (webpack_compilation)
webpack_compilation.fileDependencies.add(po_file);

function buildFile(po_file, subdir, webpack_module, webpack_compilation, out_path, filter) {
return new Promise((resolve, reject) => {
const parsed = gettext_parser.po.parse(fs.readFileSync(po_file), 'utf8');
delete parsed.translations[""][""]; // second header copy
@@ -76,6 +73,9 @@ function buildFile(po_file, subdir, webpack_module, webpack_compilation) {
if (translation.comments.flag?.match(/\bfuzzy\b/))
continue;

if (!references.some(filter))
continue;

const key = JSON.stringify(context_prefix + msgid);
// cockpit.js always ignores the first item
chunks.push(`,\n ${key}: [\n null`);
@@ -90,8 +90,6 @@ function buildFile(po_file, subdir, webpack_module, webpack_compilation) {
const wrapper = config.wrapper?.(subdir) || DEFAULT_WRAPPER;
const output = wrapper.replace('PO_DATA', chunks.join('')) + '\n';

const lang = path.basename(po_file).slice(0, -3);
const out_path = (subdir ? (subdir + '/') : '') + 'po.' + lang + '.js';
if (webpack_compilation)
webpack_compilation.emitAsset(out_path, new webpack_module.sources.RawSource(output));
else
@@ -110,9 +108,20 @@ function init(options) {

function run(webpack_module, webpack_compilation) {
const promises = [];
config.subdirs.map(subdir =>
promises.push(...get_po_files().map(po_file =>
buildFile(po_file, subdir, webpack_module, webpack_compilation))));
for (const subdir of config.subdirs) {
for (const po_file of get_po_files()) {
if (webpack_compilation)
webpack_compilation.fileDependencies.add(po_file);
const lang = path.basename(po_file).slice(0, -3);
promises.push(Promise.all([
// Separate translations for the manifest.json file and normal pages
buildFile(po_file, subdir, webpack_module, webpack_compilation,
`${subdir}/po.${lang}.js`, str => !str.includes('manifest.json')),
buildFile(po_file, subdir, webpack_module, webpack_compilation,
`${subdir}/po.manifest.${lang}.js`, str => str.includes('manifest.json'))
]));
}
}
return Promise.all(promises);
}

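
The two buildFile() calls above split every .po file into two assets per subdir, routed by where each string is referenced. A rough Python rendering of that routing rule (not the plugin itself; the reference format is illustrative):

    def destinations(references, subdir, lang):
        # An entry referenced outside manifest.json goes to the regular bundle,
        # one referenced from manifest.json goes to the manifest bundle (possibly both).
        out = []
        if any('manifest.json' not in ref for ref in references):
            out.append(f'{subdir}/po.{lang}.js')
        if any('manifest.json' in ref for ref in references):
            out.append(f'{subdir}/po.manifest.{lang}.js')
        return out

    destinations(['pkg/apps/manifest.json:3'], 'apps', 'de')  # ['apps/po.manifest.de.js']
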
3 changes: 3 additions & 0 deletions pkg/shell/index.html
@@ -8,7 +8,10 @@
<link href="../../static/branding.css" rel="stylesheet" />
<script src="../base1/cockpit.js"></script>
<script src="../manifests.js"></script>
<!-- HACK: C bridge loads translations via glob and Python via manifest.js -->
<script src="../*/po.manifest.js"></script>
<script src="../*/po.js"></script>
<script src="po.js"></script>
</head>
<body class="pf-v5-m-tabular-nums" hidden="true">
<div id="main" class="page">
13 changes: 12 additions & 1 deletion src/client/cockpit-client
@@ -55,6 +55,17 @@ def prctl(*args):
raise Exception('prctl() failed')


def get_user_state_dir():
try:
# GLib ≥ 2.72
return GLib.get_user_state_dir()
except AttributeError:
try:
return os.environ["XDG_STATE_HOME"]
except KeyError:
return os.path.expanduser("~/.local/state")


prctl.SET_PDEATHSIG = 1


@@ -222,7 +233,7 @@ class CockpitClient(Gtk.Application):
context.set_sandbox_enabled(enabled=True)
context.set_cache_model(WebKit2.CacheModel.DOCUMENT_VIEWER)

cookiesFile = os.path.join(GLib.get_user_state_dir(), "cockpit-client", "cookies.txt")
cookiesFile = os.path.join(get_user_state_dir(), "cockpit-client", "cookies.txt")
cookies = context.get_cookie_manager()
cookies.set_persistent_storage(cookiesFile, WebKit2.CookiePersistentStorage.TEXT)

124 changes: 85 additions & 39 deletions src/cockpit/packages.py
@@ -20,6 +20,7 @@
import functools
import gzip
import io
import itertools
import json
import logging
import mimetypes
@@ -62,28 +63,55 @@
logger = logging.getLogger(__name__)


def parse_accept_language(headers: JsonObject) -> List[str]:
# In practice, this is going to get called over and over again with exactly the
# same list. Let's try to cache the result.
@functools.lru_cache()
def parse_accept_language(accept_language: str) -> Sequence[str]:
"""Parse the Accept-Language header, if it exists.

Returns an ordered list of languages.
Returns an ordered list of languages, with fallbacks inserted, and
truncated to the position where 'en' would have otherwise appeared, if
applicable.

https://tools.ietf.org/html/rfc7231#section-5.3.5
https://datatracker.ietf.org/doc/html/rfc4647#section-3.4
"""

locales = []
for language in get_str(headers, 'Accept-Language', '').split(','):
language = language.strip()
locale, _, weightstr = language.partition(';q=')
weight = float(weightstr or 1)

# Skip possible empty locales
if not locale:
continue

# Locales are case-insensitive and we store our list in lowercase
locales.append((locale.lower(), weight))

return [locale for locale, _ in sorted(locales, key=lambda k: k[1], reverse=True)]
logger.debug('parse_accept_language(%r)', accept_language)
locales_with_q = []
for entry in accept_language.split(','):
entry = entry.strip().lower()
logger.debug(' entry %r', entry)
locale, _, qstr = entry.partition(';q=')
try:
q = float(qstr or 1.0)
except ValueError:
continue # ignore malformed entry

while locale:
logger.debug(' adding %r q=%r', locale, q)
locales_with_q.append((locale, q))
# strip off '-detail' suffixes until there's nothing left
locale, _, _region = locale.rpartition('-')

# Sort the list by highest q value. Otherwise, this is a stable sort.
locales_with_q.sort(key=lambda pair: pair[1], reverse=True)
logger.debug(' sorted list is %r', locales_with_q)

# If we have 'en' anywhere in our list, ignore it and all items after it.
# This will result in us getting an untranslated (ie: English) version if
# none of the more-preferred languages are found, which is what we want.
# We also take the chance to drop duplicate items. Note: both of these
# things need to happen after sorting.
results = []
for locale, _q in locales_with_q:
if locale == 'en':
break
if locale not in results:
results.append(locale)

logger.debug(' results list is %r', results)
return tuple(results)


def sortify_version(version: str) -> str:
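
A worked example (not part of the diff) of what the reworked parse_accept_language() yields for a typical header:

    from cockpit.packages import parse_accept_language

    parse_accept_language("de-CH;q=0.8, fr;q=0.9, en;q=0.5")
    # -> ('fr', 'de-ch', 'de'): 'fr' wins on q-value, 'de-ch' gains its 'de'
    # fallback, and 'en' plus everything ranked below it is dropped so that
    # untranslated English remains the implicit last resort.
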
@@ -157,7 +185,8 @@ def __init__(self, path: Path, value: JsonObject):


class Package:
PO_JS_RE: ClassVar[Pattern] = re.compile(r'po\.([^.]+)\.js(\.gz)?')
# For po{,.manifest}.js files, the interesting part is the locale name
PO_JS_RE: ClassVar[Pattern] = re.compile(r'(po|po\.manifest)\.([^.]+)\.js(\.gz)?')

# immutable after __init__
manifest: Manifest
@@ -166,7 +195,7 @@ class Package:
priority: int

# computed later
translations: Optional[Dict[str, str]] = None
translations: Optional[Dict[str, Dict[str, str]]] = None
files: Optional[Dict[str, str]] = None

def __init__(self, manifest: Manifest):
@@ -186,7 +215,7 @@ def ensure_scanned(self) -> None:
return

self.files = {}
self.translations = {}
self.translations = {'po.js': {}, 'po.manifest.js': {}}

for file in self.path.rglob('*'):
name = str(file.relative_to(self.path))
@@ -195,14 +224,31 @@ def ensure_scanned(self) -> None:

po_match = Package.PO_JS_RE.fullmatch(name)
if po_match:
locale = po_match.group(1)
basename = po_match.group(1)
locale = po_match.group(2)
# Accept-Language is case-insensitive and uses '-' to separate variants
lower_locale = locale.lower().replace('_', '-')
self.translations[lower_locale] = name

logger.debug('Adding translation %r %r -> %r', basename, lower_locale, name)
self.translations[f'{basename}.js'][lower_locale] = name
else:
basename = name[:-3] if name.endswith('.gz') else name
# strip out trailing '.gz' components
basename = re.sub('.gz$', '', name)
logger.debug('Adding content %r -> %r', basename, name)
self.files[basename] = name

# If we see a filename like `x.min.js` we want to also offer it
# at `x.js`, but only if `x.js(.gz)` itself is not present.
# Note: this works for both the case where we found the `x.js`
# first (it's already in the map) and also if we find it second
# (it will be replaced in the map by the line just above).
# See https://github.com/cockpit-project/cockpit/pull/19716
self.files.setdefault(basename.replace('.min.', '.'), name)

# support old cockpit-po-plugin which didn't write po.manifest.??.js
if not self.translations['po.manifest.js']:
self.translations['po.manifest.js'] = self.translations['po.js']

def get_content_security_policy(self) -> str:
policy = {
"default-src": "'self'",
@@ -236,21 +282,14 @@ def load_file(self, filename: str) -> Document:

return Document(path.open('rb'), content_type, content_encoding, content_security_policy)

def load_translation(self, locales: List[str]) -> Document:
def load_translation(self, path: str, locales: Sequence[str]) -> Document:
self.ensure_scanned()
assert self.translations is not None

# First check the locales that the user sent
# First match wins
for locale in locales:
with contextlib.suppress(KeyError):
return self.load_file(self.translations[locale])

# Next, check the language-only versions of variant-specified locales
for locale in locales:
language, _, region = locale.partition('-')
if region:
with contextlib.suppress(KeyError):
return self.load_file(self.translations[language])
return self.load_file(self.translations[path][locale])

# We prefer to return an empty document than 404 in order to avoid
# errors in the console when a translation can't be found
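
For illustration, the per-package maps built by ensure_scanned() and consulted here could look like this for a hypothetical package shipping a German translation and a minified script:

    # hypothetical package contents: po.de.js, po.manifest.de.js, app.min.js.gz
    translations = {
        'po.js': {'de': 'po.de.js'},
        'po.manifest.js': {'de': 'po.manifest.de.js'},
    }
    files = {
        'app.min.js': 'app.min.js.gz',
        'app.js': 'app.min.js.gz',  # .min. alias, added only because app.js(.gz) is absent
    }
    # load_translation('po.manifest.js', ('fr', 'de-ch', 'de')) would serve po.manifest.de.js;
    # with no match at all it returns an empty document rather than a 404.
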
@@ -261,9 +300,9 @@ def load_path(self, path: str, headers: JsonObject) -> Document:
assert self.files is not None
assert self.translations is not None

if path == 'po.js':
locales = parse_accept_language(headers)
return self.load_translation(locales)
if path in self.translations:
locales = parse_accept_language(get_str(headers, 'Accept-Language', ''))
return self.load_translation(path, locales)
else:
return self.load_file(self.files[path])

@@ -462,8 +501,13 @@ def load(self) -> None:
def show(self):
for name in sorted(self.packages):
package = self.packages[name]
menuitems = ''
print(f'{name:20} {menuitems:40} {package.path}')
menuitems = []
for entry in itertools.chain(
package.manifest.get('menu', {}).values(),
package.manifest.get('tools', {}).values()):
with contextlib.suppress(KeyError):
menuitems.append(entry['label'])
print(f'{name:20} {", ".join(menuitems):40} {package.path}')

def get_bridge_configs(self) -> Sequence[BridgeConfig]:
def yield_configs():
@@ -492,17 +536,19 @@ def load_manifests_js(self, headers: JsonObject) -> Document:
chunks: List[bytes] = []

# Send the translations required for the manifest files, from each package
locales = parse_accept_language(headers)
locales = parse_accept_language(get_str(headers, 'Accept-Language', ''))
for name, package in self.packages.items():
if name in ['static', 'base1']:
continue

# find_translation will always find at least 'en'
translation = package.load_translation(locales)
translation = package.load_translation('po.manifest.js', locales)
with translation.data:
if translation.content_encoding == 'gzip':
data = gzip.decompress(translation.data.read())
else:
data = translation.data.read()

chunks.append(data)

chunks.append(b"""