"src/vscode:/vscode.git/clone" did not exist on "43e08c6afa00a536fc50bd81a667bc935e1d5fae"
Commit 89e60e48 authored by wanglch's avatar wanglch
Browse files

Initial commit

parents
Pipeline #2484 canceled with stages
from marker.converters.pdf import PdfConverter
from marker.models import create_model_dict
from marker.output import text_from_rendered
from pypdf import PdfReader, PdfWriter
import os
import tempfile

_marker_converter = None


def run_marker(pdf_path: str, page_num: int = 1) -> str:
    global _marker_converter
    if _marker_converter is None:
        # Create a configuration dictionary with the necessary settings
        config = {
            "texify_inline_spans": True,  # This enables conversion of inline math to LaTeX
        }
        _marker_converter = PdfConverter(
            artifact_dict=create_model_dict(),
            config=config,
        )

    # Extract the specific page from the PDF
    pdf_to_process = pdf_path
    temp_file = None
    if page_num > 0:  # If a specific page is requested
        reader = PdfReader(pdf_path)
        # Check if the requested page exists
        if page_num > len(reader.pages):
            raise ValueError(f"Page {page_num} does not exist in the PDF. PDF has {len(reader.pages)} pages.")
        # Create a new PDF with just the requested page
        writer = PdfWriter()
        # pypdf uses 0-based indexing, so subtract 1 from page_num
        writer.add_page(reader.pages[page_num - 1])
        # Save the extracted page to a temporary file
        temp_file = tempfile.NamedTemporaryFile(suffix='.pdf', delete=False)
        temp_file.close()  # Close the file but keep the name
        with open(temp_file.name, 'wb') as output_pdf:
            writer.write(output_pdf)
        pdf_to_process = temp_file.name

    try:
        # Process the PDF (either the original or the single-page extract)
        rendered = _marker_converter(pdf_to_process)
        text, _, images = text_from_rendered(rendered)
        return text
    finally:
        # Clean up the temporary file if it was created
        if temp_file and os.path.exists(temp_file.name):
            os.unlink(temp_file.name)
import os
import tempfile

from magic_pdf.config.enums import SupportedPdfParseMethod
from magic_pdf.data.data_reader_writer import FileBasedDataReader, FileBasedDataWriter
from magic_pdf.data.dataset import PymuDocDataset
from magic_pdf.model.doc_analyze_by_custom_model import doc_analyze
from pypdf import PdfReader, PdfWriter


def run_mineru(pdf_path: str, page_num: int = 1) -> str:
    output_folder = tempfile.TemporaryDirectory()
    image_output_folder = tempfile.TemporaryDirectory()

    # Initialize writers (same for all PDFs)
    image_writer = FileBasedDataWriter(image_output_folder.name)
    md_writer = FileBasedDataWriter(output_folder.name)

    temp_file = None  # Holds the single-page extract, if one is created
    if page_num > 0:  # If a specific page is requested
        reader = PdfReader(pdf_path)
        # Check if the requested page exists
        if page_num > len(reader.pages):
            raise ValueError(f"Page {page_num} does not exist in the PDF. PDF has {len(reader.pages)} pages.")
        # Create a new PDF with just the requested page
        writer = PdfWriter()
        # pypdf uses 0-based indexing, so subtract 1 from page_num
        writer.add_page(reader.pages[page_num - 1])
        # Save the extracted page to a temporary file
        temp_file = tempfile.NamedTemporaryFile(suffix='.pdf', delete=False)
        temp_file.close()  # Close the file but keep the name
        with open(temp_file.name, 'wb') as output_pdf:
            writer.write(output_pdf)
        pdf_to_process = temp_file.name
    else:
        pdf_to_process = pdf_path

    try:
        # Read the PDF file bytes
        reader = FileBasedDataReader("")
        pdf_bytes = reader.read(pdf_to_process)
        # Create dataset instance
        ds = PymuDocDataset(pdf_bytes)
        # Inference: decide whether to run OCR mode based on dataset classification
        if ds.classify() == SupportedPdfParseMethod.OCR:
            infer_result = ds.apply(doc_analyze, ocr=True)
            pipe_result = infer_result.pipe_ocr_mode(image_writer)
        else:
            infer_result = ds.apply(doc_analyze, ocr=False)
            pipe_result = infer_result.pipe_txt_mode(image_writer)
        # Generate markdown content; the image directory is the basename of the images output folder
        image_dir_basename = os.path.basename(image_output_folder.name)
        # md_content = pipe_result.get_markdown(image_dir_basename)
        # Dump the markdown to a temporary file and read it back
        with tempfile.NamedTemporaryFile("w+", suffix=".md") as tf:
            pipe_result.dump_md(md_writer, tf.name, image_dir_basename)
            tf.flush()
            tf.seek(0)
            md_data = tf.read()
        return md_data
    finally:
        # Clean up the temporary file if it was created
        if temp_file and os.path.exists(temp_file.name):
            os.unlink(temp_file.name)
import json
import os
import tempfile

from mistralai import Mistral
from pypdf import PdfReader, PdfWriter


def run_mistral(pdf_path: str, page_num: int = 1) -> str:
    """
    Convert a page of a PDF file to markdown using the Mistral OCR API.
    https://docs.mistral.ai/capabilities/document/

    Args:
        pdf_path (str): The local path to the PDF file.

    Returns:
        str: The OCR result in markdown format.
    """
    if not os.getenv("MISTRAL_API_KEY"):
        raise SystemExit("You must specify a MISTRAL_API_KEY")
    api_key = os.environ["MISTRAL_API_KEY"]
    client = Mistral(api_key=api_key)

    temp_file = None  # Holds the single-page extract, if one is created
    if page_num > 0:  # If a specific page is requested
        reader = PdfReader(pdf_path)
        # Check if the requested page exists
        if page_num > len(reader.pages):
            raise ValueError(f"Page {page_num} does not exist in the PDF. PDF has {len(reader.pages)} pages.")
        # Create a new PDF with just the requested page
        writer = PdfWriter()
        # pypdf uses 0-based indexing, so subtract 1 from page_num
        writer.add_page(reader.pages[page_num - 1])
        # Save the extracted page to a temporary file
        temp_file = tempfile.NamedTemporaryFile(suffix='.pdf', delete=False)
        temp_file.close()  # Close the file but keep the name
        with open(temp_file.name, 'wb') as output_pdf:
            writer.write(output_pdf)
        pdf_to_process = temp_file.name
    else:
        pdf_to_process = pdf_path

    try:
        with open(pdf_to_process, "rb") as pf:
            uploaded_pdf = client.files.upload(
                file={
                    "file_name": os.path.basename(pdf_path),
                    "content": pf,
                },
                purpose="ocr",
            )
        signed_url = client.files.get_signed_url(file_id=uploaded_pdf.id)
        ocr_response = client.ocr.process(
            model="mistral-ocr-2503",
            document={
                "type": "document_url",
                "document_url": signed_url.url,
            },
        )
        client.files.delete(file_id=uploaded_pdf.id)
        return ocr_response.pages[0].markdown
    finally:
        # Clean up the temporary file if it was created
        if temp_file and os.path.exists(temp_file.name):
            os.unlink(temp_file.name)
import asyncio
import logging
from dataclasses import dataclass
from typing import Optional

# Import necessary components from olmocr
from olmocr.pipeline import (
    MetricsKeeper,
    PageResult,
    WorkerTracker,
    process_page,
    sglang_server_host,
    sglang_server_ready,
)

# Setup basic logging
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
logger = logging.getLogger("olmocr_runner")


# Basic configuration
@dataclass
class Args:
    model: str = "allenai/olmOCR-7B-0225-preview"
    model_chat_template: str = "qwen2-vl"
    model_max_context: int = 8192
    target_longest_image_dim: int = 1024
    target_anchor_text_len: int = 6000
    max_page_retries: int = 8
    max_page_error_rate: float = 0.004


async def run_olmocr_pipeline(pdf_path: str, page_num: int = 1) -> Optional[str]:
    """
    Process a single page of a PDF using the official olmocr pipeline's process_page function

    Args:
        pdf_path: Path to the PDF file
        page_num: Page number to process (1-indexed)

    Returns:
        The extracted text from the page or None if processing failed
    """
    # Ensure global variables are initialized
    global metrics, tracker
    if "metrics" not in globals() or metrics is None:
        metrics = MetricsKeeper(window=60 * 5)
    if "tracker" not in globals() or tracker is None:
        tracker = WorkerTracker()

    args = Args()
    semaphore = asyncio.Semaphore(1)
    worker_id = 0  # Using 0 as default worker ID

    # Ensure server is running
    _server_task = None
    try:
        await asyncio.wait_for(sglang_server_ready(), timeout=5)
        logger.info("Using existing sglang server")
    except Exception:
        logger.info("Starting new sglang server")
        _server_task = asyncio.create_task(sglang_server_host(args, semaphore))
        await sglang_server_ready()

    try:
        # Process the page using the pipeline's process_page function
        # Note: process_page expects both original path and local path
        # In our case, we're using the same path for both
        page_result: PageResult = await process_page(
            args=args,
            worker_id=worker_id,
            pdf_orig_path=pdf_path,
            pdf_local_path=pdf_path,
            page_num=page_num,
        )
        # Return the natural text from the response
        if page_result and page_result.response:
            return page_result.response.natural_text
        return None
    except Exception as e:
        logger.error(f"Error processing page: {type(e).__name__} - {str(e)}")
        return None
    finally:
        # We leave the server running for potential reuse
        pass


async def main():
    # Example usage
    pdf_path = "your_pdf_path.pdf"
    page_num = 1
    result = await run_olmocr_pipeline(pdf_path, page_num)
    if result:
        print(f"Extracted text: {result[:200]}...")  # Print first 200 chars
    else:
        print("Failed to extract text from the page")


if __name__ == "__main__":
    asyncio.run(main())
import json
import os
from typing import Literal

import httpx

from olmocr.data.renderpdf import render_pdf_to_base64png
from olmocr.prompts.anchor import get_anchor_text
from olmocr.prompts.prompts import (
    PageResponse,
    build_openai_silver_data_prompt,
    build_finetuning_prompt,
)


async def run_server(pdf_path: str, page_num: int = 1, server: str = "localhost:30000", model: str = "allenai/olmOCR-7B-0225-preview",
                     temperature: float = 0.1, target_longest_image_dim: int = 1024,
                     prompt_template: Literal["full", "finetune"] = "finetune",
                     response_template: Literal["plain", "json"] = "json") -> str:
    """
    Convert a page of a PDF file to markdown by sending a single request
    to an OpenAI-compatible server.

    You can use this to run against vLLM or SGLang servers, and to mix
    and match different models. It makes only one direct request, with
    no retries or error checking.

    Returns:
        str: The OCR result in markdown format.
    """
    # Convert the requested page of the PDF to a base64-encoded PNG image.
    image_base64 = render_pdf_to_base64png(pdf_path, page_num=page_num, target_longest_image_dim=target_longest_image_dim)
    anchor_text = get_anchor_text(pdf_path, page_num, pdf_engine="pdfreport")

    if prompt_template == "full":
        prompt = build_openai_silver_data_prompt(anchor_text)
    else:
        prompt = build_finetuning_prompt(anchor_text)

    request = {
        "model": model,
        "messages": [
            {
                "role": "user",
                "content": [
                    {"type": "text", "text": prompt},
                    {"type": "image_url", "image_url": {"url": f"data:image/png;base64,{image_base64}"}},
                ],
            }
        ],
        "temperature": temperature,
        "max_tokens": 3000,
    }

    # Make the request and get the response using httpx
    url = f"http://{server}/v1/chat/completions"
    async with httpx.AsyncClient(timeout=300) as client:
        response = await client.post(url, json=request)
        response.raise_for_status()
        data = response.json()

    choice = data["choices"][0]
    assert choice["finish_reason"] == "stop", "Response from server did not finish with finish_reason 'stop' as expected; this is probably going to lead to bad data"

    if response_template == "json":
        page_data = json.loads(choice["message"]["content"])
        page_response = PageResponse(**page_data)
        return page_response.natural_text
    elif response_template == "plain":
        return choice["message"]["content"]
Master - 7 1/4 - 36"
Master Bath - 7 1/4 - 30"
Laundry - 4 3/4 - 36"
Bath - 7 1/4 - 24"
MUD - 7 - 36"
UTIL - 8 1/4 - 36"
Down Bath - 7 1/4 - 32"
But Kit - 6 3/4 - 30"
Pantry - 4 3/8 - 24"
1/2 Guest - 22 9/8 - 32"
1/2 Guest Bath 5" - 24'
Table 4: Baseline model performance on each of the three scoring metrics (*task completion, task process, explanatory knowledge discovery*) across all 24 DISCOVERY WORLD tasks. Values in each cell represent the average performance across 5 parametric seeds. Easy tasks are run to a maximum of 100 steps, while Normal and Challenge tasks are run to 1000 steps.
| # | Topic | Task | ReACT | Plan+Execute | Hypothesizer |
|----|----------------|---------------------------------------|-------|--------------|--------------|
| 1 | Proteomics | Clustering (2D) | 0.87 | 0.20 | 0.00 |
| 2 | | Simplified Clustering | 0.88 | 0.40 | 0.00 |
| 3 | | Clustering (3D) | 0.88 | 0.40 | 0.60 |
| 4 | Chemistry | Single substances | 0.87 | 1.00 | 0.00 |
| 5 | | Mix of 3 substances | 0.82 | 0.00 | 0.00 |
| 6 | Archaeology | Correlations | 0.87 | 0.00 | 0.00 |
| 7 | | Simple instrument | 0.87 | 0.60 | 0.00 |
| 8 | Reactor Lab | Linear regression | 0.42 | 0.00 | 0.00 |
| 9 | | Quadratic regression | 0.43 | 0.00 | 0.20 |
| 10 | Plant Nutrients| Uncovering systems of rules | 0.51 | 0.00 | 0.00 |
| 11 | Space Sick | Presence rules | 0.91 | 0.60 | 0.00 |
| 12 | | Legal rules | 0.00 | 0.00 | 0.00 |
| 13 | Rocket Science | Open-ended discovery | 0.78 | 0.60 | 0.00 |
| 14 | Translation | Rosetta-stone style linguistic discovery| 0.30 | 0.40 | 0.00 |
| 15 | | Noun and verb | 0.49 | 0.00 | 0.00 |
| 16 | | Noun, adj., and verb | 0.49 | 0.00 | 0.00 |
| | **Average (Easy)** | | 0.59 | 0.23 | 0.05 |
| | **Average (Normal)** | | 0.09 | 0.06 | 0.14 |
| | **Average (Challenge)** | | 0.63 | 0.18 | 0.10 |
Table 5: Baseline model performance on each of the three scoring metrics (*task completion, task process, explanatory knowledge discovery*) across all 10 unit test tasks. Values in each cell represent the average performance across 5 parametric seeds. Unit tests tasks are run to a maximum of 100 steps.
| # | Unit Test Topic | ReACT | Plan+Execute | Hypothesizer |
|----|-----------------|-------|--------------|--------------|
| 25 | Multi-turn dialog with an agent | 1.00 | 1.00 | 1.00 |
| 26 | Measure an object with an instrument | 0.87 | 0.60 | 0.73 |
| 27 | Pick-and-place object | 0.90 | 0.80 | 0.80 |
| 28 | Pick-and-place object | 1.00 | 1.00 | 1.00 |
| 29 | Read DiscoveryFeed posts | 1.00 | 1.00 | 1.00 |
| 30 | Move through doors | 0.55 | 0.20 | 0.25 |
| 31 | Using keys with doors | 0.60 | 0.20 | 0.25 |
| 32 | Navigate to a specific room in a house | 0.20 | 0.00 | 0.00 |
| 33 | Search an environment for an object | 0.80 | 0.20 | 0.00 |
| 34 | Interact with a moving agent | 0.80 | 0.20 | 0.53 |
| | **Average (Unit Tests)** | 0.76 | 0.60 | 0.66 |
4.2 Baseline Agent Models
The baseline agents are described below, with model performance on Discovery tasks shown in Table 4, and performance on Unit Tests shown in Table 5. We use the GPT-4o model for all our agents due to its higher performance and lower cost compared to other models. For space we provide...
Recently Issued Accounting Pronouncements
Recently Adopted Accounting Pronouncement
In November 2023, the Financial Accounting Standards Board, or FASB, issued a new accounting standard requiring disclosures of significant expenses in operating segments. We adopted this standard in our fiscal year 2025 annual report. Refer to Note 16 of the Notes to the Consolidated Financial Statements in Part IV, Item 15 of this Annual Report on Form 10-K for further information.
Recent Accounting Pronouncements Not Yet Adopted
In December 2023, the FASB issued a new accounting standard which includes new and updated income tax disclosures, including disaggregation of information in the rate reconciliation and income taxes paid. We expect to adopt this standard in our fiscal year 2026 annual report. We do not expect the adoption of this standard to have a material impact on our Consolidated Financial Statements other than additional disclosures.
In November 2024, the FASB issued a new accounting standard requiring disclosures of certain additional expense information on an annual and interim basis, including, among other items, the amounts of purchases of inventory, employee compensation, depreciation and intangible asset amortization included within each income statement expense caption, as applicable. We expect to adopt this standard in our fiscal year 2028 annual report. We do not expect the adoption of this standard to have a material impact on our Consolidated Financial Statements other than additional disclosures.
Note 2 - Business Combination
Termination of the Arm Share Purchase Agreement
In February 2022, NVIDIA and SoftBank Group Corp., or SoftBank, announced the termination of the Share Purchase Agreement whereby NVIDIA would have acquired Arm from SoftBank. The parties agreed to terminate it due to significant regulatory challenges preventing the completion of the transaction. We recorded an acquisition termination cost of $1.4 billion in fiscal year 2023 reflecting the write-off of the prepayment provided at signing.
Note 3 - Stock-Based Compensation
Stock-based compensation expense is associated with RSUs, PSUs, market-based PSUs, and our ESPP.
Consolidated Statements of Income include stock-based compensation expense, net of amounts capitalized into inventory and subsequently recognized to cost of revenue, as follows:
| Year Ended | Jan 26, 2025 | Jan 28, 2024 | Jan 29, 2023 |
|--------------------|-------------|-------------|-------------|
| | (In millions) | | |
| Cost of revenue | $178 | $141 | $138 |
| Research and development | $3,423 | $2,532 | $1,892 |
| Sales, general and administrative | $1,136 | $876 | $680 |
| Total | $4,737 | $3,549 | $2,710 |
Stock-based compensation capitalized in inventories was not significant during fiscal years 2025, 2024, and 2023.
Executive Mansion,
Washington City,
January 10th, 1864.
Major General Hitchcock, Commissioner of Exchanges,
is authorized and directed to offer Brigadier General Trimble,
now a prisoner of war in Fort McHenry, in exchange for
Major White, who is held as a prisoner at Richmond.
He is also directed to send forward the offer of exchange
by Henry M. Warfield, Esq. of Baltimore, under a flag
of truce, and give him a pass to City Point.
Abraham Lincoln.
**Proof.** Let $S$ be the generating set associated with $D$ as described in Proposition 2.5. By the circulant diagonalization theorem, the spectrum of $G_R(D) = \Gamma(R, S)$ is the multiset $\{\lambda_g\}_{g \in R}$ where
$$\lambda_g = \sum_{s \in S} \zeta_n^{\psi(gs)} = \sum_{i=1}^{k} \left[ \sum_{s,\, Rs = \mathcal{I}_i} \zeta_n^{\psi(gs)} \right].$$
We remark that by Corollary 2.7, if $s \in R$ is such that $Rs = \mathcal{I}_i = Rx_i$, then $s$ has a unique representation of the form $s = \hat{u}x_i$ where $u \in (R/\text{Ann}_R(x_i))^\times$ and $\hat{u}$ is a fixed lift of $u$ to $R^\times$. With this representation, we can write
$$\sum_{s,\, Rs = \mathcal{I}_i} \zeta_n^{\psi(gs)} = \sum_{u \in (R/\text{Ann}_R(x_i))^\times} \zeta_n^{\psi(g\hat{u}x_i)} = \sum_{u \in (R/\text{Ann}_R(x_i))^\times} \zeta_n^{\psi_{x_i}(gu)} = c(g, R/\text{Ann}_R(x_i)).$$
Here we recall that $\psi_{x_i}$ is the induced linear functional on $R/\text{Ann}_R(x_i)$. We conclude that $\lambda_g = \sum_{i=1}^{k} c(g, R/\text{Ann}_R(x_i))$. \hfill \Box
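For instance (a special case offered only as an illustration, not spelled out in the proof): when $R = \mathbb{Z}/n\mathbb{Z}$ and $\mathcal{I}_i = (d_i)$ for divisors $d_i \mid n$, each quotient $R/\text{Ann}_R(d_i)$ is $\mathbb{Z}/(n/d_i)\mathbb{Z}$ and $c(g, \mathbb{Z}/m\mathbb{Z})$ is the classical Ramanujan sum $c_m(g)$, so the formula specializes to the familiar spectrum of integral circulant graphs:
$$\lambda_g = \sum_{i=1}^{k} c_{n/d_i}(g), \qquad \text{where } c_m(g) = \sum_{\substack{1 \le u \le m \\ \gcd(u, m) = 1}} e^{2\pi i g u / m}.$$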
The following corollary is simple yet important for our future work on perfect state transfers on gcd-graphs.
**Corollary 4.17.** Suppose that $g' = ug$ for some $u \in R^\times$. Then $\lambda_g = \lambda_{g'}$.
---
**Acknowledgements**
We thank the Department of Mathematics and Computer Science at Lake Forest College for their generous financial support through an Overleaf subscription. We also thank Ján Mináč for his constant encouragement and support.
---
**References**
1. Reza Akhtar, Megan Boggess, Tiffany Jackson-Henderson, Isidora Jiménez, Rachel Karpman, Amanda Kinzel, and Dan Pritikin, *On the unitary Cayley graph of a finite ring*, Electron. J. Combin. 16 (2009), no. 1, Research Paper 117, 13 pages.
2. Milan Bašić, Aleksandar Ilić, and Aleksandar Stamenković, *Maximal diameter of integral circulant graphs*, Information and Computation 301 (2024), 105208.
3. Maria Chudnovsky, Michal Cizek, Logan Crew, Ján Mináč, Tung T. Nguyen, Sophie Spirkl, and Nguyễn Duy Tân, *On prime Cayley graphs*, arXiv:2401.06062, to appear in Journal of Combinatorics (2024).
4. Thomas Honold, *Characterization of finite frobenius rings*, Archiv der Mathematik 76 (2001), no. 6, 406–415.
5. Irving Kaplansky, *Elementary divisors and modules*, Transactions of the American Mathematical Society 66 (1949), no. 2, 464–491.
6. Walter Klotz and Torsten Sander, *Some properties of unitary Cayley graphs*, The Electronic Journal of Combinatorics 14 (2007), no. 1, R45, 12 pages.
7. Erich Lamprecht, *Allgemeine theorie der Gaußschen Summen in endlichen kommutativen Ringen*, Mathematische Nachrichten 9 (1953), no. 3, 149–196.
8. Ján Mináč, Tung T Nguyen, and Nguyen Duy Tân, *Isomorphic gcd-graphs over polynomial rings*, arXiv preprint arXiv:2401.01768 (2024).
9. ———, *On the gcd graphs over polynomial rings*, arXiv preprint arXiv:2409.01929 (2024).
The 20 Most Important Mathematical Equations
A journey through the most elegant and influential formulas in mathematics
1. Euler's Identity
\[ e^{i\pi} + 1 = 0 \]
Connects five fundamental constants \((e, i, \pi, 1, 0)\), revealing the profound relationship between exponential functions and trigonometry.
2. Pythagorean Theorem
\[ a^2 + b^2 = c^2 \]
In right triangles, the hypotenuse squared equals the sum of the squares of the other sides. Cornerstone of geometry with applications in navigation and architecture.
3. The Fundamental Theorem of Calculus
\[ \int_{a}^{b} f(x) \, dx = F(b) - F(a) \]
Establishes that differentiation and integration are inverse operations. If \( F \) is an antiderivative of \( f \), the definite integral equals \( F(b) - F(a) \). Revolutionized mathematical problem-solving.
4. Maxwell's Equations
\[ \nabla \cdot \mathbf{E} = \frac{\rho}{\varepsilon_0} \]
\[ \nabla \cdot \mathbf{B} = 0 \]
\[ \nabla \times \mathbf{E} = -\frac{\partial \mathbf{B}}{\partial t} \]
\[ \nabla \times \mathbf{B} = \mu_0 \mathbf{J} + \mu_0 \varepsilon_0 \frac{\partial \mathbf{E}}{\partial t} \]
Unified electricity and magnetism as manifestations of the same force. Describes electromagnetic field behavior, predicting waves traveling at light speed. Enabled technologies from radio to smartphones.
V-February Flow
Data Components:
Code:
The-Stack-V2
CodeText:
SE, whatever we've scraped
WebText:
HQDCLM
DATA MIXES
~85% Source Code
~10% CodeText
~ 5% Webtext
~ 85% The-stack-V2
~ 15% CodeText
~ 0% Webtext
~100% Source Code
February Flow
Data Components:
Code:
The-Stack-V2
CodeText:
SE, whatever we've scraped
WebText:
HQOCLM
DATA MIXES
~85% Source Code
~10% CodeText
~ 5% Webtext
~ 85% The-stack-V2
~ 15% CodeText
~ 0% Webtext
~100% Source Code
Corporate social responsibility and the tobacco industry: hope or hype?
N Hirschhorn
Tobacco Control 2004;13:447–453. doi: 10.1136/tc.2003.006676
Corporate social responsibility (CSR) emerged from a realisation among transnational corporations of the need to account for and redress their adverse impact on society: specifically, on human rights, labour practices, and the environment. Two transnational tobacco companies have recently adopted CSR: Philip Morris, and British American Tobacco. This report explains the origins and theory behind CSR; examines internal company documents from Philip Morris showing the company’s deliberations on the matter, and the company’s perspective on its own behaviour; and reflects on whether marketing tobacco is antithetical to social responsibility.
Over the past three decades increasing pressure from non-governmental organisations (NGOs), governments and the United Nations, has required transnational corporations (TNCs) to examine and redress the adverse impact their businesses have on society and the environment. Many have responded by taking up what is known as “corporate social responsibility” (CSR); only recently have two major cigarette companies followed suit: Philip Morris (PM) and British American Tobacco (BAT). This report first provides the context and development of CSR; then, from internal company documents, examines how PM came to its own version. This paper examines whether a tobacco company espousing CSR should be judged simply as a corporate entity along standards of business ethics or as an irretrievably negative force in the realm of public health, thereby rendering CSR an oxymoron.
CORPORATE SOCIAL RESPONSIBILITY: THE CONTEXT
The term “corporate social responsibility” is in vogue at the moment but as a concept it is vague and means different things to different people.1
Some writers on CSR trace its American roots to the 19th century when large industries engaged in philanthropy and established great public institutions, a form of “noblesse oblige”. But the notion that corporations should be required to return more to society because of their impact on society was driven by pressures from the civil rights, peace, and environmental movements of the last half century.2 The unprecedented expansion of power and influence of TNCs over the past three decades has accelerated global trade and development, but also environmental damage and abuses of...
Abbreviations: ASH, Action on Smoking and Health; BAT, British American Tobacco; CERES, Coalition for Environmentally Responsible Economies; CSR, corporate social responsibility; DJSI, Dow Jones Sustainability Index; GCAC, Global Corporate Affairs Council; GRI, Global Reporting Initiative; MSA, Master Settlement Agreement; NGOs, non-governmental organisations; PM, Philip Morris; TNCs, transnational corporations; UNEP, United Nations Environment Program
stakeholders has occurred in other nations, with groups and individuals refusing to risk being appropriated into the industry’s public relations ambitions. It now looks like that with vigilance, tobacco control advocates can easily foment similar distaste in many areas of the business community. Our actions sought to denormalise the tobacco industry by disrupting its efforts to take its place alongside other industries—often with considerable social credit—in the hope that it might gain by association.
Tobacco industry posturing about its corporate responsibility can never hide the ugly consequences of its ongoing efforts to “work with all relevant stakeholders for the preservation of opportunities for informed adults to consume tobacco products” (translation: “we will build alliances with others who want to profit from tobacco use, to do all we can to counteract effective tobacco control”). BAT has 15.4% and Philip Morris 16.4% of the global cigarette market.6 With 4.9 million smokers currently dying from tobacco use each year, and the industry unblinkingly concurring that its products are addictive, this leaves BAT to argue why it should not be held to be largely accountable for the annual deaths of some 754 600 smokers, and Philip Morris some 803 600 smokers.
REFERENCES
1 British American Tobacco. Social Report. http://www.bat.com/204ap.
2 Wroe D. Tobacco ad campaign angers MPs. The Age (Melbourne) 2004; May 17 http://www.theage.com.au/articles/2004/05/16/1084646069771.html?oneclick=true.
3 Hirschhorn N. Corporate social responsibility and the tobacco industry: hope or hype? Tobacco Control 2004;13:447–453.
4 Ethical Corporation Asia 2004. Conference website. http://www.ethicalcorp.com/asia2004.
5 Chapman S, Shonstein S. Extreme corporate makeover: tobacco companies, corporate responsibility and the corruption of “ethics”. Globalink petition. http://petition.globalink.org/view.php?code=extreme.
6 Mackay J, Eriksen M. The tobacco atlas. Geneva: World Health Organization, 2002.
### Table 1: Composition of the pretraining data for OLMo 2
| Source | Type | Tokens | Words | Bytes | Docs |
|-------------------------|-------------------|--------|-------|--------|-------|
| DCLM-Baseline | Web pages | 3.71T | 3.32T | 21.32T | 2.95B |
| StarCoder | Code | 83.0B | 70.0B | 459B | 78.7M |
| peS2o | Academic papers | 58.6B | 51.1B | 413B | 38.8M |
| arXiv | STEM papers | 20.8B | 19.3B | 77.2B | 3.95M |
| OpenWebMath | Math web pages | 12.2B | 11.1B | 47.2B | 2.89M |
| Algebraic Stack | Math proofs code | 11.8B | 10.8B | 44.0B | 2.83M |
| Wikipedia & Wikibooks | Encyclopedic | 3.7B | 3.16B | 16.2B | 6.17M |
| **Total** | | 3.90T | 3.48T | 22.38T | 3.08B |
The **OLMo 2 1124 Mix** is composed of web text from DCLM (Li et al., 2024) together with StarCoder (Li et al., 2023b; Kocetkov et al., 2022), peS2o (Soldaini and Lo, 2023), and Wiki, which come from Dolma 1.7 (Soldaini et al., 2024). arXiv comes from Red-Pajama (Together AI, 2023), while OpenWebMath (Paster et al., 2023) and Algebraic Stack come from ProofPile II (Azerbayev et al., 2023).
#### 2.1.1 Pretraining data: OLMo 2 Mix 1124
The mix used for this stage is shown in Table 1. It consists of approximately 3.9 trillion tokens, with over 95% derived from web data. We refer to this set as **OLMo 2 Mix 1124**. This is the same pretraining data used in **OLMoE** (Muennighoff et al., 2024).
We combine data from DCLM (Li et al., 2024) and Dolma 1.7 (Soldaini et al., 2024). From DCLM, we use the "baseline 1.0" mix. From Dolma, we use the arXiv (Together AI, 2023), OpenWebMath (Paster et al., 2023), Algebraic Stack, peS2o (Soldaini and Lo, 2023), and Wikipedia subsets. arXiv, OpenWebMath, and Algebraic Stack were originally part of ProofPile II (Azerbayev et al., 2023).
Finally, we include code from StarCoder (Li et al., 2023b), which is derived from permissively-licensed repositories from GitHub (Kocetkov et al., 2022). In an attempt to include higher quality code, we remove any document from a repository with fewer than 2 stars on GitHub. Further, through manual inspection of this source, we found it to contain documents encoded in binary format or containing mostly numerical content; to remove them, we discarded documents whose most frequent word constitutes over 30% of the document, or whose top-2 most frequent words constitute over 50% of the document. To mitigate possible training loss spikes, we remove documents with repeated sequences of 32 or more n-grams. We report details and show effectiveness of this intervention in Section §3.1.
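As an illustrative sketch of the word-frequency heuristic described above (not the actual OLMo data pipeline; only the 30% and 50% thresholds are taken from the text), a document filter might look like this:

```python
from collections import Counter

def is_low_quality(text: str) -> bool:
    """Flag documents dominated by one or two tokens (illustrative sketch).

    A document is discarded if its most frequent word makes up over 30% of
    all words, or if its two most frequent words together make up over 50%.
    """
    words = text.split()
    if not words:
        return True  # Treat empty documents as discardable (an assumption)
    top = Counter(words).most_common(2)
    top1_share = top[0][1] / len(words)
    top2_share = sum(count for _, count in top) / len(words)
    return top1_share > 0.30 or top2_share > 0.50
```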
#### 2.1.2 Mid-training data: Dolmino Mix 1124
After the initial pretraining stage on mostly web data, we further train with a mixture of web data that has been more restrictively filtered for quality and a collection of domain-specific high quality data, much of which is synthetic. The purpose of this mixture is to imbue the model with math-centric skills and provide focused exposure to STEM references and high quality text. We generate several variants of this mixture, with varying sizes, but generally refer to this mixture as **Dolmino Mix 1124**. The base sources from which **Dolmino Mix 1124** is subsampled are described in Table 2. We refer the reader to Section §4 for a deep dive detailing our processes for experimenting and curating data for this mix.
3.4 EXERCISES
For the following exercises, the given functions represent the position of a particle traveling along a horizontal line.
a. Find the velocity and acceleration functions.
b. Determine the time intervals when the object is slowing down or speeding up.
150. \( s(t) = 2t^3 - 3t^2 - 12t + 8 \)
151. \( s(t) = 2t^3 - 15t^2 + 36t - 10 \)
152. \( s(t) = \frac{t}{1 + t^2} \)
153. A rocket is fired vertically upward from the ground. The distance \( s \) in feet that the rocket travels from the ground after \( t \) seconds is given by \( s(t) = -16t^2 + 560t \).
a. Find the velocity of the rocket 3 seconds after being fired.
b. Find the acceleration of the rocket 3 seconds after being fired.
154. A ball is thrown downward with a speed of 8 ft/s from the top of a 64-foot-tall building. After \( t \) seconds, its height above the ground is given by \( s(t) = -16t^2 - 8t + 64 \).
a. Determine how long it takes for the ball to hit the ground.
b. Determine the velocity of the ball when it hits the ground.
155. The position function \( s(t) = t^2 - 3t - 4 \) represents the position of the back of a car backing out of a driveway and then driving in a straight line, where \( s \) is in feet and \( t \) is in seconds. In this case, \( s(t) = 0 \) represents the time at which the back of the car is at the garage door, so \( s(0) = -4 \) is the starting position of the car, 4 feet inside the garage.
a. Determine the velocity of the car when \( s(t) = 0 \).
b. Determine the velocity of the car when \( s(t) = 14 \).
156. The position of a hummingbird flying along a straight line in \( t \) seconds is given by \( s(t) = 3t^3 - 7t \) meters.
a. Determine the velocity of the bird at \( t = 1 \) sec.
b. Determine the acceleration of the bird at \( t = 1 \) sec.
c. Determine the acceleration of the bird when the velocity equals 0.
157. A potato is launched vertically upward with an initial velocity of 100 ft/s from a potato gun at the top of an 85-foot-tall building. The distance in feet that the potato travels from the ground after \( t \) seconds is given by \( s(t) = -16t^2 + 100t + 85 \).
a. Find the velocity of the potato after 0.5 s and 5.75 s.
b. Find the speed of the potato at 0.5 s and 5.75 s.
c. Determine when the potato reaches its maximum height.
d. Find the acceleration of the potato at 0.5 s and 1.5 s.
e. Determine how long the potato is in the air.
f. Determine the velocity of the potato upon hitting the ground.
158. The position function \( s(t) = t^3 - 8t \) gives the position in miles of a freight train where east is the positive direction and \( t \) is measured in hours.
a. Determine the direction the train is traveling when \( s(t) = 0 \).
b. Determine the direction the train is traveling when \( a(t) = 0 \).
c. Determine the time intervals when the train is slowing down or speeding up.
159. The following graph shows the position \( y = s(t) \) of an object moving along a straight line.
![Graph of position function](image)
a. Use the graph of the position function to determine the time intervals when the velocity is positive, negative, or zero.
b. Sketch the graph of the velocity function.
c. Use the graph of the velocity function to determine the time intervals when the acceleration is positive, negative, or zero.
d. Determine the time intervals when the object is speeding up or slowing down.
{"pdf": "multi_column_miss.pdf", "page": 1, "id": "multi_column_miss_00", "type": "present", "text": "Corporate social responsibility and the tobacco industry: hope or hype?"}
{"pdf": "multi_column_miss.pdf", "page": 1, "id": "multi_column_miss_01", "type": "present", "text": "this leaves BAT to argue why it should not be held to be largely accountable for the annual deaths of some 754 600 smokers, and Philip Morris some 803 600 smokers."}
{"pdf": "multi_column_miss.pdf", "page": 1, "id": "multi_column_miss_02", "type": "present", "text": "The term \"corporate social responsibility\" is in vogue at the moment but as a concept it is vague and means different things to different people.", "max_diffs": 2}
{"pdf": "multi_column_miss.pdf", "page": 1, "id": "multi_column_miss_03", "type": "present", "text": "Over the past three decades increasing pressure from non-governmental"}
{"pdf": "multi_column_miss.pdf", "page": 1, "id": "multi_column_miss_04", "type": "absent", "text": "Downloaded from http://tobaccocontrol.bmj.com/"}
{"pdf": "multi_column_miss.pdf", "page": 1, "id": "multi_column_miss_10", "type": "order", "before": "Corporate social responsibility and the tobacco industry: hope or hype?", "after": "The unprecedented expansion of power and influence of TNCs over the past three decades has accelerated global trade and development, but also environmental damage and abuses of", "max_diffs": 2}
{"pdf": "multi_column_miss.pdf", "page": 1, "id": "multi_column_miss_11", "type": "order", "before": "It now looks like that with vigilance", "after": "this leaves BAT to argue why it should not be held to be largely accountable for the annual deaths", "max_diffs": 2}
{"pdf": "multi_column_miss.pdf", "page": 1, "id": "multi_column_miss_12", "type": "order", "before": "Corporate social responsibility (CSR) emerged from a realisation among transnational corporations", "after": " perspective on its own behaviour; and reflects on whether marketing tobacco is antithetical to social responsibility.", "max_diffs": 2}
{"pdf": "discoverworld_crazy_table4.pdf", "page": 1, "id": "discoverworld_crazy_table4_00", "type": "present", "text": "Table 4: Baseline model performance on each of the three scoring metrics"}
{"pdf": "discoverworld_crazy_table4.pdf", "page": 1, "id": "discoverworld_crazy_table4_01", "type": "present", "text": "Table 5: Baseline model performance on each of the three scoring metrics"}
{"pdf": "discoverworld_crazy_table4.pdf", "page": 1, "id": "discoverworld_crazy_table4_02", "type": "present", "text": "We use the GPT-4O model for all our agents due to its higher performance and lower cost compared to other models. For space we provide"}
{"pdf": "mattsnotes.pdf", "page": 1, "id": "mattsnotes_minediff_00", "type": "present", "checked": "verified", "text": "The-Stack-V2"}
{"pdf": "mattsnotes.pdf", "page": 1, "id": "mattsnotes_minediff_01", "type": "present", "checked": "verified", "text": "SE, whatever we've scraped"}
{"pdf": "mattsnotes.pdf", "page": 1, "id": "mattsnotes_minediff_02", "type": "present", "checked": "verified", "text": "HQ DCLM"}
{"pdf": "mattsnotes.pdf", "page": 2, "id": "mattsnotes_minediff_03", "type": "present", "checked": "verified", "text": "Order by repo"}
{"pdf": "mattsnotes.pdf", "page": 3, "id": "mattsnotes_minediff_04", "type": "present", "checked": "verified", "text": "ARCH + TRAINING"}
{"pdf": "buildingnotes.pdf", "page": 1, "id": "building_notes_00", "type": "present", "checked": "verified", "text": "Master Bath", "case_sensitive": false}
{"pdf": "buildingnotes.pdf", "page": 1, "id": "building_notes_01", "type": "present", "checked": "verified", "text": "Laundry", "case_sensitive": false}
{"pdf": "buildingnotes.pdf", "page": 1, "id": "building_notes_02", "type": "present", "checked": "verified", "text": "Guest Bath", "case_sensitive": false}
{"pdf": "lincoln_letter.pdf", "page": 1, "id": "lincoln_letter_minediff_00", "type": "present", "checked": "verified", "text": "January 10th 1864."}
{"pdf": "lincoln_letter.pdf", "page": 1, "id": "lincoln_letter_minediff_01", "type": "present", "checked": "verified", "text": "Major General Hitchcock, Commissioner of Exchanges, is authorized and directed to offer Brigadier General Trimble, now a prisoner of war in Fort McHenry, in exchange for Major White, who is held as a prisoner at Richmond."}
{"pdf": "lincoln_letter.pdf", "page": 1, "id": "lincoln_letter_minediff_03", "type": "present", "checked": "verified", "text": "He is also directed to send forward the offer of exchange by Henry M. Warfield, Esq. of Baltimore, under a flag of truce, and give him a pass to City Point."}
{"pdf": "openstax_caculus_pg_273.pdf", "page": 1, "id": "openstax_caculus_pg_273_minediff_02", "type": "present", "checked": "verified", "text": "Use the graph of the position function to determine the time intervals when the velocity is positive, negative, or zero."}
{"pdf": "openstax_caculus_pg_273.pdf", "page": 1, "id": "openstax_caculus_pg_273_minediff_03", "type": "present", "checked": "verified", "text": "Use the graph of the velocity function to determine the time intervals when the acceleration is positive, negative, or zero."}
{"pdf": "openstax_caculus_pg_273.pdf", "page": 1, "id": "openstax_caculus_pg_273_minediff_04", "type": "order", "before": "150.", "after": "157."}
{"pdf": "openstax_caculus_pg_273.pdf", "page": 1, "id": "openstax_caculus_pg_273_minediff_05", "type": "order", "before": "150.", "after": "158."}
{"pdf": "openstax_caculus_pg_273.pdf", "page": 1, "id": "openstax_caculus_pg_273_minediff_06", "type": "order", "before": "150.", "after": "159."}
{"pdf": "multi_column_miss.pdf", "page": 1, "id": "multi_column_miss_minediff_01", "type": "present", "checked": "verified", "text": "This report first provides the context and development of CSR; then, from internal company documents, examines how PM came to its own version."}
{"pdf": "multi_column_miss.pdf", "page": 1, "id": "multi_column_miss_minediff_02", "type": "present", "checked": "verified", "text": "This paper examines whether a tobacco company espousing CSR should be judged simply as a corporate entity along standards of business ethics, or as an irretrievably negative force in the realm of public health, thereby rendering CSR an oxymoron."}
{"pdf": "olmo2-pg4.pdf", "page": 1, "id": "olmo2-pg4_minediff_00", "type": "present", "checked": "verified", "text": "Table 1 Composition of the pretraining data for OLMo 2."}
{"pdf": "olmo2-pg4.pdf", "page": 1, "id": "olmo2-pg4_table00", "type": "table", "cell": "Type"}
{"pdf": "olmo2-pg4.pdf", "page": 1, "id": "olmo2-pg4_table01", "type": "table", "cell": "3.32T", "left": "3.71T"}
{"pdf": "olmo2-pg4.pdf", "page": 1, "id": "olmo2-pg4_table02", "type": "table", "cell": "3.32T", "right": "21.32T"}
{"pdf": "olmo2-pg4.pdf", "page": 1, "id": "olmo2-pg4_table03", "type": "table", "cell": "11.8B", "up": "12.2B"}
{"pdf": "olmo2-pg4.pdf", "page": 1, "id": "olmo2-pg4_table04", "type": "table", "cell": "11.8B", "down": "3.7B"}
{"pdf": "olmo2-pg4.pdf", "page": 1, "id": "olmo2-pg4_table05", "type": "table", "cell": "3.32T", "top_heading": "Words"}
{"pdf": "olmo2-pg4.pdf", "page": 1, "id": "olmo2-pg4_table06", "type": "table", "cell": "arXiv", "top_heading": "Source"}
{"pdf": "olmo2-pg4.pdf", "page": 1, "id": "olmo2-pg4_table07", "type": "table", "cell": "47.2B", "top_heading": "Bytes"}
{"pdf": "olmo2-pg4.pdf", "page": 1, "id": "olmo2-pg4_table08", "type": "table", "cell": "Math proofs code", "left_heading": "Algebraic Stack"}
{"pdf": "discoverworld_crazy_table4.pdf", "page": 1, "id": "olmo2-discoverworld_crazy_table4_t00", "type": "table", "cell": "Quadratic regression", "left": "Challenge"}
{"pdf": "discoverworld_crazy_table4.pdf", "page": 1, "id": "olmo2-discoverworld_crazy_table4_t01", "type": "table", "cell": "Instrument Use", "left": "Normal"}
{"pdf": "discoverworld_crazy_table4.pdf", "page": 1, "id": "olmo2-discoverworld_crazy_table4_t02", "type": "table", "cell": "0.87", "top_heading": "Procedure"}
{"pdf": "discoverworld_crazy_table4.pdf", "page": 1, "id": "olmo2-discoverworld_crazy_table4_t03", "type": "table", "cell": "0.87", "top_heading": "ReACT"}
{"pdf": "discoverworld_crazy_table4.pdf", "page": 1, "id": "olmo2-discoverworld_crazy_table4_t04", "type": "table", "cell": "Pick-and-place object", "left_heading": "27"}
{"pdf": "discoverworld_crazy_table4.pdf", "page": 1, "id": "olmo2-discoverworld_crazy_table4_t05", "type": "table", "cell": "0.66", "right": "0.44"}
{"pdf": "discoverworld_crazy_table4.pdf", "page": 1, "id": "olmo2-discoverworld_crazy_table4_t06", "type": "table", "cell": "Interact with a moving agent", "top_heading": "Unit Test Topic"}
{"pdf": "earnings.pdf", "page": 1, "id": "earnings_table00", "type": "table", "cell": "1,136", "top_heading": "Year Ended"}
{"pdf": "earnings.pdf", "page": 1, "id": "earnings_table01", "type": "table", "cell": "Year Ended"}
{"pdf": "earnings.pdf", "page": 1, "id": "earnings_table02", "type": "table", "cell": "680", "up": "1,892"}
{"pdf": "earnings.pdf", "page": 1, "id": "earnings_table03", "type": "table", "cell": "2,532", "left_heading": "Research and development"}
{"pdf": "earnings.pdf", "page": 1, "id": "earnings_table04", "type": "absent", "text": "62"}
{"pdf": "mathfuncs.pdf", "page": 1, "id": "mathfuncs_00", "type": "order", "before": "Euler's Identity", "after": "Pythagorean Theorem"}
{"pdf": "mathfuncs.pdf", "page": 1, "id": "mathfuncs_01", "type": "order", "before": "Pythagorean Theorem", "after": "The Fundamental Theorem of Calculus"}
{"pdf": "mathfuncs.pdf", "page": 1, "id": "mathfuncs_02", "type": "order", "before": "The Fundamental Theorem of Calculus", "after": "Maxwell's Equations"}
{"pdf": "mathfuncs.pdf", "page": 1, "id": "mathfuncs_03", "type": "math", "math": "e^{i \\pi}+1=0"}
{"pdf": "mathfuncs.pdf", "page": 1, "id": "mathfuncs_04", "type": "math", "math": "\\int_{a}^{b} f(x) d x=F(b)-F(a)"}
{"pdf": "mathfuncs.pdf", "page": 1, "id": "mathfuncs_05", "type": "math", "math": "a^{2}+b^{2}=c^{2}"}
{"pdf": "mathfuncs.pdf", "page": 1, "id": "mathfuncs_06", "type": "math", "math": "\\nabla \\times \\mathbf{E}=-\\frac{\\partial \\mathbf{B}}{\\partial t}"}
{"pdf": "mathfuncs_colswitch.pdf", "page": 1, "id": "mathfuncscol_00", "type": "order", "before": "Euler's Identity", "after": "Pythagorean Theorem"}
{"pdf": "mathfuncs_colswitch.pdf", "page": 1, "id": "mathfuncscol_01", "type": "order", "before": "Pythagorean Theorem", "after": "The Fundamental Theorem of Calculus"}
{"pdf": "mathfuncs_colswitch.pdf", "page": 1, "id": "mathfuncscol_02", "type": "order", "before": "The Fundamental Theorem of Calculus", "after": "Maxwell's Equations"}
{"pdf": "mathfuncs_colswitch.pdf", "page": 1, "id": "mathfuncscol_03", "type": "math", "math": "e^{i \\pi}+1=0"}
{"pdf": "mathfuncs_colswitch.pdf", "page": 1, "id": "mathfuncscol_04", "type": "math", "math": "\\int_{a}^{b} f(x) d x=F(b)-F(a)"}
{"pdf": "mathfuncs_colswitch.pdf", "page": 1, "id": "mathfuncscol_05", "type": "math", "math": "a^{2}+b^{2}=c^{2}"}
{"pdf": "mathfuncs_colswitch.pdf", "page": 1, "id": "mathfuncscol_06", "type": "math", "math": "\\nabla \\times \\mathbf{E}=-\\frac{\\partial \\mathbf{B}}{\\partial t}"}
{"pdf": "math_2503_04086.pdf", "page": 1, "id": "math_2503_04086_00", "type": "math", "math": "\\lambda_{g}=\\sum_{s \\in S} \\zeta_{n}^{\\psi(g s)}=\\sum_{i=1}^{k}\\left[\\sum_{s, R s=\\mathcal{I}_{i}} \\zeta_{n}^{\\psi(g s)}\\right]"}
{"pdf": "math_2503_04086.pdf", "page": 1, "id": "math_2503_04086_01", "type": "math", "math": "\\lambda_{g}=\\lambda_{g^{\\prime}}"}
{"pdf": "math_2503_04086.pdf", "page": 1, "id": "math_2503_04086_02", "type": "math", "math": "u \\in\\left(R / \\operatorname{Ann}_{R}\\left(x_{i}\\right)\\right)^{\\times}"}
{"pdf": "math_2503_04086.pdf", "page": 1, "id": "math_2503_04086_03", "type": "math", "math": "\\lambda_{g}=\\sum_{i=1}^{k} c\\left(g, R / \\operatorname{Ann}_{R}\\left(x_{i}\\right)\\right)"}
{"pdf": "math_2503_04086.pdf", "page": 1, "id": "math_2503_04086_04", "type": "present", "text": "We also thank Ján Mináč for his constant encouragement and support."}
{"pdf": "math_2503_04086.pdf", "page": 1, "id": "math_2503_04086_05", "type": "present", "text": "Allgemeine theorie der Gaußschen Summen in endlichen kommutativen Ringe"}
Master -7¼ - 36'
MasterBata -7¼ - 30"
Laundry -4¾ - 36"
Bath -7¼ - 24"
MUD -7 - 36"
UTIL - 8¼ - 36
DWN BATH -7¼ - 32
BUT KIT - 6¾ 30
PANTRY - 4¾ 24
2 GUEST - 324/8 32
5 GUEST BATH 5" 24"