Release 0.5.0 (#305)
etiennebacher authored Jan 23, 2025
1 parent 588af2b commit 049c47a
Showing 4 changed files with 58 additions and 29 deletions.
2 changes: 1 addition & 1 deletion DESCRIPTION
@@ -1,7 +1,7 @@
Type: Package
Package: altdoc
Title: Package Documentation Websites with 'Quarto', 'Docsify', 'Docute', or 'MkDocs'
Version: 0.4.0.9000
Version: 0.5.0
Authors@R:
c(person(given = "Etienne",
family = "Bacher",
13 changes: 11 additions & 2 deletions NEWS.md
@@ -1,11 +1,20 @@
# News

## Development
## 0.5.0

Breaking changes:
### Breaking changes

* altdoc no longer renders `README.qmd` to Markdown automatically. Users should render it manually to make sure that the README on CRAN and GitHub stays in sync with the altdoc home page (see the render sketch after this diff).

### Other changes

* `README.qmd` is no longer required to create a `quarto_website`, only
`README.md` (#295).

### Bug fixes

* Fix some failures in CRAN checks (#303).

## 0.4.0

### Breaking changes
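The breaking change above leaves rendering `README.qmd` to `README.md` up to the user. A minimal sketch of one way to do that, assuming the `quarto` R package and the Quarto CLI are available; the exact command is not prescribed by the commit:

```r
# Render README.qmd to GitHub-flavoured Markdown (README.md) by hand, so the
# README used on CRAN and GitHub stays in sync with the altdoc home page.
quarto::quarto_render("README.qmd", output_format = "gfm")
```

From a shell, `quarto render README.qmd --to gfm` does the same.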
65 changes: 41 additions & 24 deletions R/import_misc.R
@@ -1,4 +1,3 @@

.import_citation <- function(src_dir, tar_dir) {
src_file <- fs::path_join(c(src_dir, "CITATION.md"))
tar_file <- fs::path_join(c(tar_dir, "CITATION.md"))
@@ -14,32 +13,42 @@
{
name <- .pkg_name(src_dir)
cite <- utils::citation(name)
head <- vapply(cite, function(x) {
if (is.null(x$header)) {
""
} else {
paste0(x$header, "\n\n")
}
}, character(1))
head <- vapply(
cite,
function(x) {
if (is.null(x$header)) {
""
} else {
paste0(x$header, "\n\n")
}
},
character(1)
)
if (!is.null(attr(cite, "mheader"))) {
head[1] <- paste0(attr(cite, "mheader"), "\n\n", head[1])
}
cite <- paste0(head, format(cite, style = "html"))
c("# Citation", "", paste(cite, collapse = "\n\n"))
},
error = function(e) NULL)
error = function(e) NULL
)
)
if (!is.null(cite)) {
writeLines(cite, tar_file)
}
}
}


.import_basic <- function(src_dir, tar_dir, name = "NEWS") {
src <- c(
"NEWS.md", "NEWS.txt", "NEWS", "NEWS.Rd",
"inst/NEWS.md", "inst/NEWS.txt", "inst/NEWS", "inst/NEWS.Rd"
"NEWS.md",
"NEWS.txt",
"NEWS",
"NEWS.Rd",
"inst/NEWS.md",
"inst/NEWS.txt",
"inst/NEWS",
"inst/NEWS.Rd"
)
src <- gsub("NEWS", name, src, fixed = TRUE)
src <- sapply(src, function(x) fs::path_join(c(src_dir, x)))
@@ -48,7 +57,7 @@
# no news to import
if (length(src) == 0) {
return(invisible())
# priority hard-coded by the order of the vector above
# priority hard-coded by the order of the vector above
} else {
src <- src[1]
}
@@ -65,7 +74,7 @@
writeLines(gsub("^##", "#", x), tar)
}

# all other formats only require a copy
# all other formats only require a copy
} else {
fs::file_copy(src, tar, overwrite = TRUE)
}
@@ -78,7 +87,6 @@
cli::cli_alert_success("{.file {name}} imported.")
}


# Autolink news, PR, and people in NEWS
.parse_news <- function(path, news_path) {
if (!fs::file_exists(news_path)) {
@@ -97,34 +105,43 @@
)
new_news <- paste(orig_news, collapse = "\n")
if (length(issues_pr) > 0) {
issues_pr_link <- paste0(.gh_url(path), "/issues/", gsub("#", "", issues_pr))
issues_pr_link <- paste0(
.gh_url(path),
"/issues/",
gsub("#", "", issues_pr)
)

issues_pr_out <- data.frame(
in_text = issues_pr,
replacement = paste0("[", issues_pr, "](", issues_pr_link, ")"),
nchar = nchar(issues_pr)
) |>
unique()
)
issues_pr_out <- unique(issues_pr_out)

# need to go in decreasing order of characters so that we don't insert the
# link for #78 in "#783" for instance

issues_pr_out <- issues_pr_out[order(issues_pr_out$nchar, decreasing = TRUE), ]
issues_pr_out <- issues_pr_out[
order(issues_pr_out$nchar, decreasing = TRUE),
]

for (i in seq_len(nrow(issues_pr_out))) {
new_news <- gsub(paste0(issues_pr_out[i, "in_text"], "(?![0-9])"),
new_news <- gsub(
paste0(issues_pr_out[i, "in_text"], "(?![0-9])"),
issues_pr_out[i, "replacement"],
new_news,
perl = TRUE
)
}
}


### People
# regex from https://github.com/r-lib/pkgdown/blob/main/R/repo.R
people <- unlist(
regmatches(orig_news, gregexpr("(\\s|^|\\()@([-\\w]+)", orig_news, perl = TRUE))
regmatches(
orig_news,
gregexpr("(\\s|^|\\()@([-\\w]+)", orig_news, perl = TRUE)
)
)

people <- gsub("^ ", "", people)
@@ -138,8 +155,8 @@
in_text = people,
replacement = paste0("[", people, "](", people_link, ")"),
nchar = nchar(people)
) |>
unique()
)
people_out <- unique(people_out)

people_out <- people_out[order(people_out$nchar, decreasing = TRUE), ]

7 changes: 5 additions & 2 deletions cran-comments.md
@@ -4,5 +4,8 @@

## Other comments

This is the second submission of altdoc 0.4.0. First submission failed because
of a dead link in the README, that is now fixed.
This release fixes the errors in CRAN checks that occurred in 0.4.0.

This is the second submission of 0.5.0. The first submission had a NOTE due
to the use of `|>` without declaring the minimum R version it requires. This
is now fixed.
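For context on that NOTE: the base pipe `|>` requires R >= 4.1.0, so a package keeping it would state that minimum version in DESCRIPTION, roughly as sketched below; this commit instead rewrites the piped calls in R/import_misc.R (see the diff above).

```
Depends:
    R (>= 4.1.0)
```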
