1 change: 1 addition & 0 deletions src/basic_memory/cli/commands/sync.py
@@ -70,6 +70,7 @@ async def get_sync_service(): # pragma: no cover

# Create sync service
sync_service = SyncService(
config=config,
entity_service=entity_service,
entity_parser=entity_parser,
entity_repository=entity_repository,
5 changes: 5 additions & 0 deletions src/basic_memory/config.py
@@ -40,6 +40,11 @@ class ProjectConfig(BaseSettings):

log_level: str = "DEBUG"

update_permalinks_on_move: bool = Field(
default=False,
description="Whether to update permalinks when files are moved or renamed. default (False)",
)

model_config = SettingsConfigDict(
env_prefix="BASIC_MEMORY_",
extra="ignore",
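For context, a minimal sketch of how the new flag could be toggled through the existing `BASIC_MEMORY_` environment prefix; `DemoConfig` below is a hypothetical stand-in for `ProjectConfig`, not the real class.

```python
import os

from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict


class DemoConfig(BaseSettings):
    """Hypothetical stand-in for ProjectConfig, showing only the new flag."""

    update_permalinks_on_move: bool = Field(default=False)

    model_config = SettingsConfigDict(env_prefix="BASIC_MEMORY_", extra="ignore")


# The flag stays off by default; enabling it via the environment mirrors
# how the other BASIC_MEMORY_ settings are overridden.
os.environ["BASIC_MEMORY_UPDATE_PERMALINKS_ON_MOVE"] = "true"
print(DemoConfig().update_permalinks_on_move)  # True
```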
3 changes: 3 additions & 0 deletions src/basic_memory/file_utils.py
@@ -104,6 +104,9 @@ def has_frontmatter(content: str) -> bool:
Returns:
True if content has valid frontmatter markers (---), False otherwise
"""
if not content:
return False

content = content.strip()
if not content.startswith("---"):
return False
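A quick sketch of the effect of the new empty-content guard; the closing-marker check is elided in the diff, so the body below is a hedged reconstruction rather than the real implementation.

```python
def has_frontmatter_sketch(content: str) -> bool:
    """Hedged reconstruction of has_frontmatter with the new guard."""
    if not content:                      # new: empty input is never frontmatter
        return False
    content = content.strip()
    if not content.startswith("---"):    # must open with the frontmatter marker
        return False
    # closing-marker detection is elided in the diff; assume a second "---" is required
    return "---" in content[3:]


assert has_frontmatter_sketch("") is False
assert has_frontmatter_sketch("plain note") is False
assert has_frontmatter_sketch("---\ntitle: x\n---\nbody") is True
```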
20 changes: 12 additions & 8 deletions src/basic_memory/markdown/entity_parser.py
@@ -92,32 +92,36 @@ def parse_date(self, value: Any) -> Optional[datetime]:
async def parse_file(self, path: Path | str) -> EntityMarkdown:
"""Parse markdown file into EntityMarkdown."""

# TODO move to api endpoint to check if absolute path was requested
# Check if the path is already absolute
if isinstance(path, Path) and path.is_absolute() or (isinstance(path, str) and Path(path).is_absolute()):
if (
isinstance(path, Path)
and path.is_absolute()
or (isinstance(path, str) and Path(path).is_absolute())
):
absolute_path = Path(path)
else:
absolute_path = self.base_path / path

# Parse frontmatter and content using python-frontmatter
post = frontmatter.load(str(absolute_path))
file_content = absolute_path.read_text()
return await self.parse_file_content(absolute_path, file_content)

async def parse_file_content(self, absolute_path, file_content):
post = frontmatter.loads(file_content)
# Extract file stat info
file_stats = absolute_path.stat()

metadata = post.metadata
metadata["title"] = post.metadata.get("title", absolute_path.name)
metadata["title"] = post.metadata.get("title", absolute_path.stem)
metadata["type"] = post.metadata.get("type", "note")
tags = parse_tags(post.metadata.get("tags", [])) # pyright: ignore
if tags:
metadata["tags"] = tags

# frontmatter
entity_frontmatter = EntityFrontmatter(
metadata=post.metadata,
)

entity_content = parse(post.content)

return EntityMarkdown(
frontmatter=entity_frontmatter,
content=post.content,
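A hedged usage sketch of the new split: `parse_file` still reads from disk, while `parse_file_content` can be called directly with content already in memory (for example, by a sync pass that has just read the file). The `EntityParser(base_path=...)` constructor call and the paths are assumptions for illustration, not shown in this diff.

```python
import asyncio
from pathlib import Path

from basic_memory.markdown.entity_parser import EntityParser


async def main() -> None:
    parser = EntityParser(base_path=Path("/notes"))  # assumed constructor signature

    # Absolute and base-relative paths both resolve now
    markdown = await parser.parse_file("ideas/permalinks.md")

    # Or reuse content that was already read, avoiding a second disk read
    note = Path("/notes/ideas/permalinks.md")
    markdown = await parser.parse_file_content(note, note.read_text())
    print(markdown.frontmatter.title)


asyncio.run(main())
```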
22 changes: 12 additions & 10 deletions src/basic_memory/markdown/utils.py
@@ -1,14 +1,14 @@
"""Utilities for converting between markdown and entity models."""

from pathlib import Path
from typing import Optional, Any
from typing import Any, Optional

from frontmatter import Post

from basic_memory.file_utils import has_frontmatter, remove_frontmatter
from basic_memory.markdown import EntityMarkdown
from basic_memory.models import Entity, Observation as ObservationModel
from basic_memory.utils import generate_permalink
from basic_memory.models import Entity
from basic_memory.models import Observation as ObservationModel


def entity_model_from_markdown(
@@ -32,16 +32,13 @@ def entity_model_from_markdown(
if not markdown.created or not markdown.modified: # pragma: no cover
raise ValueError("Both created and modified dates are required in markdown")

# Generate permalink if not provided
permalink = markdown.frontmatter.permalink or generate_permalink(file_path)

# Create or update entity
model = entity or Entity()

# Update basic fields
model.title = markdown.frontmatter.title
model.entity_type = markdown.frontmatter.type
model.permalink = permalink
model.permalink = markdown.frontmatter.permalink
model.file_path = str(file_path)
model.content_type = "text/markdown"
model.created_at = markdown.created
@@ -87,12 +84,17 @@ async def schema_to_markdown(schema: Any) -> Post:
for field in ["type", "title", "permalink"]:
frontmatter_metadata.pop(field, None)

# Create Post with ordered fields
# Create Post with fields in insertion order
post = Post(
content,
title=schema.title,
type=schema.entity_type,
permalink=schema.permalink,
**frontmatter_metadata,
)
# set the permalink if passed in
if schema.permalink:
post.metadata["permalink"] = schema.permalink

if frontmatter_metadata:
post.metadata.update(frontmatter_metadata)

return post
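A small sketch of the new `Post` construction order, using python-frontmatter directly: the permalink key is only written when one is present, and the remaining metadata is merged afterwards. The `build_post` helper is illustrative, not part of the library.

```python
from typing import Optional

from frontmatter import Post


def build_post(title: str, entity_type: str, permalink: Optional[str], extra: dict) -> Post:
    post = Post("note body", title=title, type=entity_type)
    if permalink:                        # only set when the schema carries one
        post.metadata["permalink"] = permalink
    if extra:                            # remaining frontmatter keeps insertion order
        post.metadata.update(extra)
    return post


print(build_post("Demo", "note", None, {"tags": ["a", "b"]}).metadata)
# {'title': 'Demo', 'type': 'note', 'tags': ['a', 'b']}
```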
4 changes: 3 additions & 1 deletion src/basic_memory/mcp/tools/write_note.py
@@ -88,8 +88,10 @@ async def write_note(
# Format semantic summary based on status code
action = "Created" if response.status_code == 201 else "Updated"
summary = [
f"# {action} {result.file_path} ({result.checksum[:8] if result.checksum else 'unknown'})",
f"# {action} note",
f"file_path: {result.file_path}",
f"permalink: {result.permalink}",
f"checksum: {result.checksum[:8] if result.checksum else 'unknown'}",
]

# Count observations by category
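For reference, the reshaped summary header for a hypothetical result; the values below are illustrative stand-ins for the real `result` fields.

```python
# Illustrative values standing in for the real `result` returned by the API
action, file_path, permalink, checksum = "Created", "notes/demo.md", "notes/demo", "9f2c1a7e4b0d"

summary = [
    f"# {action} note",
    f"file_path: {file_path}",
    f"permalink: {permalink}",
    f"checksum: {checksum[:8] if checksum else 'unknown'}",
]
print("\n".join(summary))
# # Created note
# file_path: notes/demo.md
# permalink: notes/demo
# checksum: 9f2c1a7e
```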
18 changes: 9 additions & 9 deletions src/basic_memory/services/entity_service.py
@@ -1,24 +1,24 @@
"""Service for managing entities in the database."""

from pathlib import Path
from typing import Sequence, List, Optional, Tuple, Union
from typing import List, Optional, Sequence, Tuple, Union

import frontmatter
from loguru import logger
from sqlalchemy.exc import IntegrityError

from basic_memory.markdown import EntityMarkdown
from basic_memory.markdown.entity_parser import EntityParser
from basic_memory.markdown.utils import entity_model_from_markdown, schema_to_markdown
from basic_memory.models import Entity as EntityModel, Observation, Relation
from basic_memory.models import Entity as EntityModel
from basic_memory.models import Observation, Relation
from basic_memory.repository import ObservationRepository, RelationRepository
from basic_memory.repository.entity_repository import EntityRepository
from basic_memory.schemas import Entity as EntitySchema
from basic_memory.schemas.base import Permalink
from basic_memory.services.exceptions import EntityNotFoundError, EntityCreationError
from basic_memory.services import FileService
from basic_memory.services import BaseService
from basic_memory.services import BaseService, FileService
from basic_memory.services.exceptions import EntityCreationError, EntityNotFoundError
from basic_memory.services.link_resolver import LinkResolver
from basic_memory.markdown.entity_parser import EntityParser
from basic_memory.utils import generate_permalink


@@ -89,7 +89,7 @@ async def create_or_update_entity(self, schema: EntitySchema) -> Tuple[EntityMod
logger.debug(f"Creating or updating entity: {schema}")

# Try to find existing entity using smart resolution
existing = await self.link_resolver.resolve_link(schema.permalink)
existing = await self.link_resolver.resolve_link(schema.permalink or schema.file_path)

if existing:
logger.debug(f"Found existing entity: {existing.permalink}")
@@ -100,7 +100,7 @@ async def create_or_update_entity(self, schema: EntitySchema) -> Tuple[EntityMod

async def create_entity(self, schema: EntitySchema) -> EntityModel:
"""Create a new entity and write to filesystem."""
logger.debug(f"Creating entity: {schema.permalink}")
logger.debug(f"Creating entity: {schema.title}")

# Get file path and ensure it's a Path object
file_path = Path(schema.file_path)
@@ -230,7 +230,7 @@ async def create_entity_from_markdown(
Creates the entity with null checksum to indicate sync not complete.
Relations will be added in second pass.
"""
logger.debug(f"Creating entity: {markdown.frontmatter.title}")
logger.debug(f"Creating entity: {markdown.frontmatter.title} file_path: {file_path}")
model = entity_model_from_markdown(file_path, markdown)

# Mark as incomplete because we still need to add relations
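A hedged sketch of the new lookup fallback in `create_or_update_entity`: when a schema carries no permalink, the file path is passed to the link resolver instead, so path-only notes can still match an existing entity. The `Protocol` and helper below are illustrative stand-ins, not repository code.

```python
from typing import Optional, Protocol


class LinkResolverLike(Protocol):
    async def resolve_link(self, key: str) -> Optional[object]: ...


async def resolve_existing(
    resolver: LinkResolverLike, permalink: Optional[str], file_path: str
) -> Optional[object]:
    # Falls back to file_path when permalink is None or empty
    return await resolver.resolve_link(permalink or file_path)
```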
26 changes: 3 additions & 23 deletions src/basic_memory/services/search_service.py
@@ -181,17 +181,6 @@ async def index_entity_markdown(
Each type gets its own row in the search index with appropriate metadata.
"""

if entity.permalink is None: # pragma: no cover
logger.error(
"Missing permalink for markdown entity",
entity_id=entity.id,
title=entity.title,
file_path=entity.file_path,
)
raise ValueError(
f"Entity permalink should not be None for markdown entity: {entity.id} ({entity.title})"
)

content_stems = []
content_snippet = ""
title_variants = self._generate_variants(entity.title)
@@ -202,22 +191,13 @@
content_stems.append(content)
content_snippet = f"{content[:250]}"

content_stems.extend(self._generate_variants(entity.permalink))
if entity.permalink:
content_stems.extend(self._generate_variants(entity.permalink))

content_stems.extend(self._generate_variants(entity.file_path))

entity_content_stems = "\n".join(p for p in content_stems if p and p.strip())

if entity.permalink is None: # pragma: no cover
logger.error(
"Missing permalink for markdown entity",
entity_id=entity.id,
title=entity.title,
file_path=entity.file_path,
)
raise ValueError(
f"Entity permalink should not be None for markdown entity: {entity.id} ({entity.title})"
)

# Index entity
await self.repository.index_item(
SearchIndexRow(
78 changes: 54 additions & 24 deletions src/basic_memory/sync/sync_service.py
@@ -11,6 +11,8 @@
from loguru import logger
from sqlalchemy.exc import IntegrityError

from basic_memory.config import ProjectConfig
from basic_memory.file_utils import has_frontmatter
from basic_memory.markdown import EntityParser
from basic_memory.models import Entity
from basic_memory.repository import EntityRepository, RelationRepository
@@ -65,13 +67,15 @@ class SyncService:

def __init__(
self,
config: ProjectConfig,
entity_service: EntityService,
entity_parser: EntityParser,
entity_repository: EntityRepository,
relation_repository: RelationRepository,
search_service: SearchService,
file_service: FileService,
):
self.config = config
self.entity_service = entity_service
self.entity_parser = entity_parser
self.entity_repository = entity_repository
@@ -327,36 +331,40 @@ async def sync_markdown_file(self, path: str, new: bool = True) -> Tuple[Optiona
"""
# Parse markdown first to get any existing permalink
logger.debug("Parsing markdown file", path=path)
entity_markdown = await self.entity_parser.parse_file(path)

# Resolve permalink - this handles all the cases including conflicts
permalink = await self.entity_service.resolve_permalink(path, markdown=entity_markdown)
file_path = self.entity_parser.base_path / path
file_content = file_path.read_text()
file_contains_frontmatter = has_frontmatter(file_content)

# If permalink changed, update the file
if permalink != entity_markdown.frontmatter.permalink:
logger.info(
"Updating permalink",
path=path,
old_permalink=entity_markdown.frontmatter.permalink,
new_permalink=permalink,
)
# entity markdown will always contain frontmatter, so it can be used to create/update the entity
entity_markdown = await self.entity_parser.parse_file(path)

entity_markdown.frontmatter.metadata["permalink"] = permalink
checksum = await self.file_service.update_frontmatter(path, {"permalink": permalink})
else:
checksum = await self.file_service.compute_checksum(path)
# if the file contains frontmatter, resolve a permalink
if file_contains_frontmatter:
# Resolve permalink - this handles all the cases including conflicts
permalink = await self.entity_service.resolve_permalink(path, markdown=entity_markdown)

# If permalink changed, update the file
if permalink != entity_markdown.frontmatter.permalink:
logger.info(
"Updating permalink",
path=path,
old_permalink=entity_markdown.frontmatter.permalink,
new_permalink=permalink,
)

entity_markdown.frontmatter.metadata["permalink"] = permalink
await self.file_service.update_frontmatter(path, {"permalink": permalink})

# if the file is new, create an entity
if new:
# Create entity with final permalink
logger.debug("Creating new entity from markdown", path=path, permalink=permalink)

logger.debug("Creating new entity from markdown", path=path)
await self.entity_service.create_entity_from_markdown(Path(path), entity_markdown)

# otherwise we need to update the entity and observations
else:
logger.debug("Updating entity from markdown", path=path, permalink=permalink)

logger.debug("Updating entity from markdown", path=path)
await self.entity_service.update_entity_and_observations(Path(path), entity_markdown)

# Update relations and search index
Expand All @@ -366,10 +374,10 @@ async def sync_markdown_file(self, path: str, new: bool = True) -> Tuple[Optiona
# This is necessary for files with wikilinks to ensure consistent checksums
# after relation processing is complete
final_checksum = await self.file_service.compute_checksum(path)

# set checksum
await self.entity_repository.update(entity.id, {"checksum": final_checksum})

logger.debug(
"Markdown sync completed",
path=path,
Expand All @@ -378,7 +386,7 @@ async def sync_markdown_file(self, path: str, new: bool = True) -> Tuple[Optiona
relation_count=len(entity.relations),
checksum=final_checksum,
)

# Return the final checksum to ensure everything is consistent
return entity, final_checksum

@@ -475,8 +483,30 @@ async def handle_move(self, old_path, new_path):

entity = await self.entity_repository.get_by_file_path(old_path)
if entity:
# Update file_path but keep the same permalink for link stability
updated = await self.entity_repository.update(entity.id, {"file_path": new_path})
# Update file_path in all cases
updates = {"file_path": new_path}

# If configured, also update permalink to match new path
if self.config.update_permalinks_on_move:
# generate new permalink value
new_permalink = await self.entity_service.resolve_permalink(new_path)

# write to file and get new checksum
new_checksum = await self.file_service.update_frontmatter(
new_path, {"permalink": new_permalink}
)

updates["permalink"] = new_permalink
updates["checksum"] = new_checksum

logger.info(
"Updating permalink on move",
old_permalink=entity.permalink,
new_permalink=new_permalink,
new_checksum=new_checksum,
)

updated = await self.entity_repository.update(entity.id, updates)

if updated is None: # pragma: no cover
logger.error(
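A condensed, hedged sketch of the new opt-in move behaviour: with `update_permalinks_on_move` enabled, a rename re-resolves the permalink from the new path and rewrites the file's frontmatter; with it disabled, only `file_path` changes and existing links stay stable. Names mirror the diff; the surrounding wiring is assumed.

```python
async def handle_move_sketch(sync, old_path: str, new_path: str) -> None:
    entity = await sync.entity_repository.get_by_file_path(old_path)
    if entity is None:
        return

    updates = {"file_path": new_path}              # the path reference always moves

    if sync.config.update_permalinks_on_move:      # opt-in permalink rewrite
        new_permalink = await sync.entity_service.resolve_permalink(new_path)
        new_checksum = await sync.file_service.update_frontmatter(
            new_path, {"permalink": new_permalink}
        )
        updates.update(permalink=new_permalink, checksum=new_checksum)

    await sync.entity_repository.update(entity.id, updates)
```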